| line | stmt | bran | cond | sub | pod | time | code | 
| 1 |  |  |  |  |  |  | /* | 
| 2 |  |  |  |  |  |  | Copyright (C) 2015-2017 Alexander Borisov | 
| 3 |  |  |  |  |  |  |  | 
| 4 |  |  |  |  |  |  | This library is free software; you can redistribute it and/or | 
| 5 |  |  |  |  |  |  | modify it under the terms of the GNU Lesser General Public | 
| 6 |  |  |  |  |  |  | License as published by the Free Software Foundation; either | 
| 7 |  |  |  |  |  |  | version 2.1 of the License, or (at your option) any later version. | 
| 8 |  |  |  |  |  |  |  | 
| 9 |  |  |  |  |  |  | This library is distributed in the hope that it will be useful, | 
| 10 |  |  |  |  |  |  | but WITHOUT ANY WARRANTY; without even the implied warranty of | 
| 11 |  |  |  |  |  |  | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | 
| 12 |  |  |  |  |  |  | Lesser General Public License for more details. | 
| 13 |  |  |  |  |  |  |  | 
| 14 |  |  |  |  |  |  | You should have received a copy of the GNU Lesser General Public | 
| 15 |  |  |  |  |  |  | License along with this library; if not, write to the Free Software | 
| 16 |  |  |  |  |  |  | Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | 
| 17 |  |  |  |  |  |  |  | 
| 18 |  |  |  |  |  |  | Author: lex.borisov@gmail.com (Alexander Borisov) | 
| 19 |  |  |  |  |  |  | */ | 
| 20 |  |  |  |  |  |  |  | 
| 21 |  |  |  |  |  |  | #include "myhtml/tokenizer_script.h" | 
| 22 |  |  |  |  |  |  |  | 
| 23 |  |  |  |  |  |  |  | 
| 24 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 25 |  |  |  |  |  |  | { | 
| 26 | 0 | 0 |  |  |  |  | while (html_offset < html_size) | 
| 27 |  |  |  |  |  |  | { | 
| 28 | 0 | 0 |  |  |  |  | if(html[html_offset] == '<') { | 
| 29 | 0 |  |  |  |  |  | token_node->element_begin = (tree->global_offset + html_offset); | 
| 30 |  |  |  |  |  |  |  | 
| 31 | 0 |  |  |  |  |  | html_offset++; | 
| 32 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_LESS_THAN_SIGN; | 
| 33 |  |  |  |  |  |  |  | 
| 34 | 0 |  |  |  |  |  | break; | 
| 35 |  |  |  |  |  |  | } | 
| 36 |  |  |  |  |  |  |  | 
| 37 | 0 |  |  |  |  |  | html_offset++; | 
| 38 |  |  |  |  |  |  | } | 
| 39 |  |  |  |  |  |  |  | 
| 40 | 0 |  |  |  |  |  | return html_offset; | 
| 41 |  |  |  |  |  |  | } | 
| 42 |  |  |  |  |  |  |  | 
| 43 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_less_than_sign(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 44 |  |  |  |  |  |  | { | 
| 45 | 0 | 0 |  |  |  |  | if(html[html_offset] == '/') | 
| 46 |  |  |  |  |  |  | { | 
| 47 | 0 |  |  |  |  |  | html_offset++; | 
| 48 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_END_TAG_OPEN; | 
| 49 |  |  |  |  |  |  | } | 
| 50 | 0 | 0 |  |  |  |  | else if(html[html_offset] == '!') | 
| 51 |  |  |  |  |  |  | { | 
| 52 | 0 |  |  |  |  |  | html_offset++; | 
| 53 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPE_START; | 
| 54 |  |  |  |  |  |  | } | 
| 55 |  |  |  |  |  |  | else { | 
| 56 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA; | 
| 57 |  |  |  |  |  |  | } | 
| 58 |  |  |  |  |  |  |  | 
| 59 | 0 |  |  |  |  |  | return html_offset; | 
| 60 |  |  |  |  |  |  | } | 
| 61 |  |  |  |  |  |  |  | 
| 62 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_escape_start(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 63 |  |  |  |  |  |  | { | 
| 64 | 0 | 0 |  |  |  |  | if(html[html_offset] == '-') { | 
| 65 | 0 |  |  |  |  |  | html_offset++; | 
| 66 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPE_START_DASH; | 
| 67 |  |  |  |  |  |  | } | 
| 68 |  |  |  |  |  |  | else { | 
| 69 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA; | 
| 70 |  |  |  |  |  |  | } | 
| 71 |  |  |  |  |  |  |  | 
| 72 | 0 |  |  |  |  |  | return html_offset; | 
| 73 |  |  |  |  |  |  | } | 
| 74 |  |  |  |  |  |  |  | 
| 75 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_escape_start_dash(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 76 |  |  |  |  |  |  | { | 
| 77 | 0 | 0 |  |  |  |  | if(html[html_offset] == '-') { | 
| 78 | 0 |  |  |  |  |  | html_offset++; | 
| 79 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_DASH_DASH; | 
| 80 |  |  |  |  |  |  | } | 
| 81 |  |  |  |  |  |  | else { | 
| 82 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA; | 
| 83 |  |  |  |  |  |  | } | 
| 84 |  |  |  |  |  |  |  | 
| 85 | 0 |  |  |  |  |  | return html_offset; | 
| 86 |  |  |  |  |  |  | } | 
| 87 |  |  |  |  |  |  |  | 
| 88 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_end_tag_open(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 89 |  |  |  |  |  |  | { | 
| 90 | 0 | 0 |  |  |  |  | if(myhtml_ascii_char_cmp(html[html_offset])) { | 
| 91 | 0 |  |  |  |  |  | token_node->str.length = (html_offset + tree->global_offset); | 
| 92 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_END_TAG_NAME; | 
| 93 |  |  |  |  |  |  | } | 
| 94 |  |  |  |  |  |  | else { | 
| 95 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA; | 
| 96 |  |  |  |  |  |  | } | 
| 97 |  |  |  |  |  |  |  | 
| 98 | 0 |  |  |  |  |  | return html_offset; | 
| 99 |  |  |  |  |  |  | } | 
| 100 |  |  |  |  |  |  |  | 
| 101 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_end_tag_name(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 102 |  |  |  |  |  |  | { | 
| 103 | 0 | 0 |  |  |  |  | while(html_offset < html_size) | 
| 104 |  |  |  |  |  |  | { | 
| 105 | 0 | 0 |  |  |  |  | if(myhtml_whithspace(html[html_offset], ==, ||)) | 
| 106 |  |  |  |  |  |  | { | 
| 107 | 0 | 0 |  |  |  |  | if(((html_offset + tree->global_offset) - token_node->str.length) != 6) { | 
| 108 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA; | 
| 109 | 0 |  |  |  |  |  | html_offset++; | 
| 110 | 0 |  |  |  |  |  | break; | 
| 111 |  |  |  |  |  |  | } | 
| 112 |  |  |  |  |  |  |  | 
| 113 | 0 |  |  |  |  |  | size_t tmp_size = token_node->str.length; | 
| 114 | 0 |  |  |  |  |  | const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6); | 
| 115 |  |  |  |  |  |  |  | 
| 116 | 0 | 0 |  |  |  |  | if(mycore_strncasecmp(tem_name, "script", 6) == 0) | 
| 117 |  |  |  |  |  |  | { | 
| 118 | 0 |  |  |  |  |  | token_node = myhtml_tokenizer_queue_create_text_node_if_need(tree, token_node, html, ((html_offset + tree->global_offset) - 8), MyHTML_TOKEN_TYPE_SCRIPT); | 
| 119 | 0 | 0 |  |  |  |  | if(token_node == NULL) { | 
| 120 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP; | 
| 121 | 0 |  |  |  |  |  | return 0; | 
| 122 |  |  |  |  |  |  | } | 
| 123 |  |  |  |  |  |  |  | 
| 124 | 0 |  |  |  |  |  | token_node->raw_begin = tmp_size; | 
| 125 | 0 |  |  |  |  |  | token_node->raw_length = 6; | 
| 126 | 0 |  |  |  |  |  | token_node->tag_id = MyHTML_TAG_SCRIPT; | 
| 127 | 0 |  |  |  |  |  | token_node->type = MyHTML_TOKEN_TYPE_CLOSE; | 
| 128 |  |  |  |  |  |  |  | 
| 129 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_BEFORE_ATTRIBUTE_NAME; | 
| 130 |  |  |  |  |  |  | } | 
| 131 |  |  |  |  |  |  | else { | 
| 132 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA; | 
| 133 |  |  |  |  |  |  | } | 
| 134 |  |  |  |  |  |  |  | 
| 135 | 0 |  |  |  |  |  | html_offset++; | 
| 136 | 0 |  |  |  |  |  | break; | 
| 137 |  |  |  |  |  |  | } | 
| 138 | 0 | 0 |  |  |  |  | else if(html[html_offset] == '/') | 
| 139 |  |  |  |  |  |  | { | 
| 140 | 0 | 0 |  |  |  |  | if(((html_offset + tree->global_offset) - token_node->str.length) != 6) { | 
| 141 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA; | 
| 142 | 0 |  |  |  |  |  | html_offset++; | 
| 143 | 0 |  |  |  |  |  | break; | 
| 144 |  |  |  |  |  |  | } | 
| 145 |  |  |  |  |  |  |  | 
| 146 | 0 |  |  |  |  |  | size_t tmp_size = token_node->str.length; | 
| 147 | 0 |  |  |  |  |  | const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6); | 
| 148 |  |  |  |  |  |  |  | 
| 149 | 0 | 0 |  |  |  |  | if(mycore_strncasecmp(tem_name, "script", 6) == 0) | 
| 150 |  |  |  |  |  |  | { | 
| 151 | 0 |  |  |  |  |  | token_node = myhtml_tokenizer_queue_create_text_node_if_need(tree, token_node, html, ((html_offset + tree->global_offset) - 8), MyHTML_TOKEN_TYPE_SCRIPT); | 
| 152 | 0 | 0 |  |  |  |  | if(token_node == NULL) { | 
| 153 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP; | 
| 154 | 0 |  |  |  |  |  | return 0; | 
| 155 |  |  |  |  |  |  | } | 
| 156 |  |  |  |  |  |  |  | 
| 157 | 0 |  |  |  |  |  | token_node->raw_begin = tmp_size; | 
| 158 | 0 |  |  |  |  |  | token_node->raw_length = 6; | 
| 159 | 0 |  |  |  |  |  | token_node->tag_id = MyHTML_TAG_SCRIPT; | 
| 160 | 0 |  |  |  |  |  | token_node->type = MyHTML_TOKEN_TYPE_CLOSE|MyHTML_TOKEN_TYPE_CLOSE_SELF; | 
| 161 |  |  |  |  |  |  |  | 
| 162 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_BEFORE_ATTRIBUTE_NAME; | 
| 163 |  |  |  |  |  |  | } | 
| 164 |  |  |  |  |  |  | else { | 
| 165 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA; | 
| 166 |  |  |  |  |  |  | } | 
| 167 |  |  |  |  |  |  |  | 
| 168 | 0 |  |  |  |  |  | html_offset++; | 
| 169 | 0 |  |  |  |  |  | break; | 
| 170 |  |  |  |  |  |  | } | 
| 171 | 0 | 0 |  |  |  |  | else if(html[html_offset] == '>') | 
| 172 |  |  |  |  |  |  | { | 
| 173 | 0 | 0 |  |  |  |  | if(((html_offset + tree->global_offset) - token_node->str.length) != 6) { | 
| 174 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA; | 
| 175 | 0 |  |  |  |  |  | html_offset++; | 
| 176 | 0 |  |  |  |  |  | break; | 
| 177 |  |  |  |  |  |  | } | 
| 178 |  |  |  |  |  |  |  | 
| 179 | 0 |  |  |  |  |  | size_t tmp_size = token_node->str.length; | 
| 180 | 0 |  |  |  |  |  | const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6); | 
| 181 |  |  |  |  |  |  |  | 
| 182 | 0 | 0 |  |  |  |  | if(mycore_strncasecmp(tem_name, "script", 6) == 0) | 
| 183 |  |  |  |  |  |  | { | 
| 184 | 0 |  |  |  |  |  | token_node = myhtml_tokenizer_queue_create_text_node_if_need(tree, token_node, html, ((html_offset + tree->global_offset) - 8), MyHTML_TOKEN_TYPE_SCRIPT); | 
| 185 | 0 | 0 |  |  |  |  | if(token_node == NULL) { | 
| 186 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP; | 
| 187 | 0 |  |  |  |  |  | return 0; | 
| 188 |  |  |  |  |  |  | } | 
| 189 |  |  |  |  |  |  |  | 
| 190 | 0 |  |  |  |  |  | token_node->raw_begin = tmp_size; | 
| 191 | 0 |  |  |  |  |  | token_node->raw_length = 6; | 
| 192 | 0 |  |  |  |  |  | token_node->tag_id = MyHTML_TAG_SCRIPT; | 
| 193 | 0 |  |  |  |  |  | token_node->type  = MyHTML_TOKEN_TYPE_CLOSE; | 
| 194 |  |  |  |  |  |  |  | 
| 195 | 0 |  |  |  |  |  | html_offset++; | 
| 196 |  |  |  |  |  |  |  | 
| 197 | 0 |  |  |  |  |  | token_node->element_length = (tree->global_offset + html_offset) - token_node->element_begin; | 
| 198 |  |  |  |  |  |  |  | 
| 199 | 0 | 0 |  |  |  |  | if(myhtml_queue_add(tree, html_offset, token_node) != MyHTML_STATUS_OK) { | 
| 200 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP; | 
| 201 | 0 |  |  |  |  |  | return 0; | 
| 202 |  |  |  |  |  |  | } | 
| 203 |  |  |  |  |  |  |  | 
| 204 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_DATA; | 
| 205 |  |  |  |  |  |  | } | 
| 206 |  |  |  |  |  |  | else { | 
| 207 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA; | 
| 208 | 0 |  |  |  |  |  | html_offset++; | 
| 209 |  |  |  |  |  |  | } | 
| 210 |  |  |  |  |  |  |  | 
| 211 | 0 |  |  |  |  |  | break; | 
| 212 |  |  |  |  |  |  | } | 
| 213 | 0 | 0 |  |  |  |  | else if(myhtml_ascii_char_unless_cmp(html[html_offset])) | 
| 214 |  |  |  |  |  |  | { | 
| 215 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA; | 
| 216 | 0 |  |  |  |  |  | break; | 
| 217 |  |  |  |  |  |  | } | 
| 218 |  |  |  |  |  |  |  | 
| 219 | 0 |  |  |  |  |  | html_offset++; | 
| 220 |  |  |  |  |  |  | } | 
| 221 |  |  |  |  |  |  |  | 
| 222 | 0 |  |  |  |  |  | return html_offset; | 
| 223 |  |  |  |  |  |  | } | 
| 224 |  |  |  |  |  |  |  | 
| 225 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_escaped_dash_dash(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 226 |  |  |  |  |  |  | { | 
| 227 | 0 | 0 |  |  |  |  | if(html[html_offset] == '-') { | 
| 228 | 0 |  |  |  |  |  | html_offset++; | 
| 229 | 0 |  |  |  |  |  | return html_offset; | 
| 230 |  |  |  |  |  |  | } | 
| 231 |  |  |  |  |  |  |  | 
| 232 | 0 | 0 |  |  |  |  | if(html[html_offset] == '<') { | 
| 233 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_LESS_THAN_SIGN; | 
| 234 |  |  |  |  |  |  | } | 
| 235 | 0 | 0 |  |  |  |  | else if(html[html_offset] == '>') { | 
| 236 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA; | 
| 237 |  |  |  |  |  |  | } | 
| 238 |  |  |  |  |  |  | else { | 
| 239 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 240 |  |  |  |  |  |  | } | 
| 241 |  |  |  |  |  |  |  | 
| 242 | 0 |  |  |  |  |  | html_offset++; | 
| 243 |  |  |  |  |  |  |  | 
| 244 | 0 |  |  |  |  |  | return html_offset; | 
| 245 |  |  |  |  |  |  | } | 
| 246 |  |  |  |  |  |  |  | 
| 247 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_escaped_less_than_sign(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 248 |  |  |  |  |  |  | { | 
| 249 | 0 | 0 |  |  |  |  | if(html[html_offset] == '/') { | 
| 250 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_END_TAG_OPEN; | 
| 251 | 0 |  |  |  |  |  | html_offset++; | 
| 252 |  |  |  |  |  |  | } | 
| 253 | 0 | 0 |  |  |  |  | else if(myhtml_ascii_char_cmp(html[html_offset])) { | 
| 254 | 0 |  |  |  |  |  | token_node->str.length = (html_offset + tree->global_offset); | 
| 255 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPE_START; | 
| 256 |  |  |  |  |  |  | } | 
| 257 |  |  |  |  |  |  | else { | 
| 258 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 259 |  |  |  |  |  |  | } | 
| 260 |  |  |  |  |  |  |  | 
| 261 | 0 |  |  |  |  |  | return html_offset; | 
| 262 |  |  |  |  |  |  | } | 
| 263 |  |  |  |  |  |  |  | 
| 264 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_escaped_end_tag_open(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 265 |  |  |  |  |  |  | { | 
| 266 | 0 | 0 |  |  |  |  | if(myhtml_ascii_char_cmp(html[html_offset])) { | 
| 267 | 0 |  |  |  |  |  | token_node->str.length = (html_offset + tree->global_offset); | 
| 268 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_END_TAG_NAME; | 
| 269 |  |  |  |  |  |  | } | 
| 270 |  |  |  |  |  |  | else { | 
| 271 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 272 |  |  |  |  |  |  | } | 
| 273 |  |  |  |  |  |  |  | 
| 274 | 0 |  |  |  |  |  | return html_offset; | 
| 275 |  |  |  |  |  |  | } | 
| 276 |  |  |  |  |  |  |  | 
| 277 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_escaped_end_tag_name(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 278 |  |  |  |  |  |  | { | 
| 279 | 0 | 0 |  |  |  |  | while(html_offset < html_size) | 
| 280 |  |  |  |  |  |  | { | 
| 281 | 0 | 0 |  |  |  |  | if(myhtml_whithspace(html[html_offset], ==, ||)) | 
| 282 |  |  |  |  |  |  | { | 
| 283 | 0 | 0 |  |  |  |  | if(((html_offset + tree->global_offset) - token_node->str.length) != 6) { | 
| 284 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 285 | 0 |  |  |  |  |  | html_offset++; | 
| 286 | 0 |  |  |  |  |  | break; | 
| 287 |  |  |  |  |  |  | } | 
| 288 |  |  |  |  |  |  |  | 
| 289 | 0 |  |  |  |  |  | size_t tmp_size = token_node->str.length; | 
| 290 | 0 |  |  |  |  |  | const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6); | 
| 291 |  |  |  |  |  |  |  | 
| 292 | 0 | 0 |  |  |  |  | if(mycore_strncasecmp(tem_name, "script", 6) == 0) | 
| 293 |  |  |  |  |  |  | { | 
| 294 | 0 |  |  |  |  |  | token_node = myhtml_tokenizer_queue_create_text_node_if_need(tree, token_node, html, ((html_offset + tree->global_offset) - 8), MyHTML_TOKEN_TYPE_SCRIPT); | 
| 295 | 0 | 0 |  |  |  |  | if(token_node == NULL) { | 
| 296 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP; | 
| 297 | 0 |  |  |  |  |  | return 0; | 
| 298 |  |  |  |  |  |  | } | 
| 299 |  |  |  |  |  |  |  | 
| 300 | 0 |  |  |  |  |  | token_node->raw_begin = tmp_size; | 
| 301 | 0 |  |  |  |  |  | token_node->raw_length = 6; | 
| 302 | 0 |  |  |  |  |  | token_node->tag_id = MyHTML_TAG_SCRIPT; | 
| 303 | 0 |  |  |  |  |  | token_node->type = MyHTML_TOKEN_TYPE_CLOSE; | 
| 304 |  |  |  |  |  |  |  | 
| 305 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_BEFORE_ATTRIBUTE_NAME; | 
| 306 |  |  |  |  |  |  | } | 
| 307 |  |  |  |  |  |  | else { | 
| 308 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 309 |  |  |  |  |  |  | } | 
| 310 |  |  |  |  |  |  |  | 
| 311 | 0 |  |  |  |  |  | html_offset++; | 
| 312 | 0 |  |  |  |  |  | break; | 
| 313 |  |  |  |  |  |  | } | 
| 314 | 0 | 0 |  |  |  |  | else if(html[html_offset] == '/') | 
| 315 |  |  |  |  |  |  | { | 
| 316 | 0 | 0 |  |  |  |  | if(((html_offset + tree->global_offset) - token_node->str.length) != 6) { | 
| 317 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 318 | 0 |  |  |  |  |  | html_offset++; | 
| 319 | 0 |  |  |  |  |  | break; | 
| 320 |  |  |  |  |  |  | } | 
| 321 |  |  |  |  |  |  |  | 
| 322 | 0 |  |  |  |  |  | size_t tmp_size = token_node->str.length; | 
| 323 | 0 |  |  |  |  |  | const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6); | 
| 324 |  |  |  |  |  |  |  | 
| 325 | 0 | 0 |  |  |  |  | if(mycore_strncasecmp(tem_name, "script", 6) == 0) | 
| 326 |  |  |  |  |  |  | { | 
| 327 | 0 |  |  |  |  |  | token_node = myhtml_tokenizer_queue_create_text_node_if_need(tree, token_node, html, ((html_offset + tree->global_offset) - 8), MyHTML_TOKEN_TYPE_SCRIPT); | 
| 328 | 0 | 0 |  |  |  |  | if(token_node == NULL) { | 
| 329 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP; | 
| 330 | 0 |  |  |  |  |  | return 0; | 
| 331 |  |  |  |  |  |  | } | 
| 332 |  |  |  |  |  |  |  | 
| 333 | 0 |  |  |  |  |  | token_node->raw_begin = tmp_size; | 
| 334 | 0 |  |  |  |  |  | token_node->raw_length = 6; | 
| 335 | 0 |  |  |  |  |  | token_node->tag_id = MyHTML_TAG_SCRIPT; | 
| 336 | 0 |  |  |  |  |  | token_node->type = MyHTML_TOKEN_TYPE_CLOSE|MyHTML_TOKEN_TYPE_CLOSE_SELF; | 
| 337 |  |  |  |  |  |  |  | 
| 338 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_BEFORE_ATTRIBUTE_NAME; | 
| 339 |  |  |  |  |  |  | } | 
| 340 |  |  |  |  |  |  | else { | 
| 341 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 342 |  |  |  |  |  |  | } | 
| 343 |  |  |  |  |  |  |  | 
| 344 | 0 |  |  |  |  |  | html_offset++; | 
| 345 | 0 |  |  |  |  |  | break; | 
| 346 |  |  |  |  |  |  | } | 
| 347 | 0 | 0 |  |  |  |  | else if(html[html_offset] == '>') | 
| 348 |  |  |  |  |  |  | { | 
| 349 | 0 | 0 |  |  |  |  | if(((html_offset + tree->global_offset) - token_node->str.length) != 6) { | 
| 350 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 351 | 0 |  |  |  |  |  | html_offset++; | 
| 352 | 0 |  |  |  |  |  | break; | 
| 353 |  |  |  |  |  |  | } | 
| 354 |  |  |  |  |  |  |  | 
| 355 | 0 |  |  |  |  |  | size_t tmp_size = token_node->str.length; | 
| 356 | 0 |  |  |  |  |  | const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6); | 
| 357 |  |  |  |  |  |  |  | 
| 358 | 0 | 0 |  |  |  |  | if(mycore_strncasecmp(tem_name, "script", 6) == 0) | 
| 359 |  |  |  |  |  |  | { | 
| 360 | 0 |  |  |  |  |  | token_node = myhtml_tokenizer_queue_create_text_node_if_need(tree, token_node, html, ((html_offset + tree->global_offset) - 8), MyHTML_TOKEN_TYPE_SCRIPT); | 
| 361 | 0 | 0 |  |  |  |  | if(token_node == NULL) { | 
| 362 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP; | 
| 363 | 0 |  |  |  |  |  | return 0; | 
| 364 |  |  |  |  |  |  | } | 
| 365 |  |  |  |  |  |  |  | 
| 366 | 0 |  |  |  |  |  | token_node->raw_begin = tmp_size; | 
| 367 | 0 |  |  |  |  |  | token_node->raw_length = 6; | 
| 368 | 0 |  |  |  |  |  | token_node->tag_id = MyHTML_TAG_SCRIPT; | 
| 369 | 0 |  |  |  |  |  | token_node->type = MyHTML_TOKEN_TYPE_CLOSE; | 
| 370 |  |  |  |  |  |  |  | 
| 371 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_DATA; | 
| 372 |  |  |  |  |  |  |  | 
| 373 | 0 |  |  |  |  |  | html_offset++; | 
| 374 |  |  |  |  |  |  |  | 
| 375 | 0 |  |  |  |  |  | token_node->element_length = (tree->global_offset + html_offset) - token_node->element_begin; | 
| 376 |  |  |  |  |  |  |  | 
| 377 | 0 | 0 |  |  |  |  | if(myhtml_queue_add(tree, html_offset, token_node) != MyHTML_STATUS_OK) { | 
| 378 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP; | 
| 379 | 0 |  |  |  |  |  | return 0; | 
| 380 |  |  |  |  |  |  | } | 
| 381 |  |  |  |  |  |  | } | 
| 382 |  |  |  |  |  |  | else { | 
| 383 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 384 | 0 |  |  |  |  |  | html_offset++; | 
| 385 |  |  |  |  |  |  | } | 
| 386 | 0 |  |  |  |  |  | break; | 
| 387 |  |  |  |  |  |  | } | 
| 388 | 0 | 0 |  |  |  |  | else if(myhtml_ascii_char_unless_cmp(html[html_offset])) | 
| 389 |  |  |  |  |  |  | { | 
| 390 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 391 | 0 |  |  |  |  |  | break; | 
| 392 |  |  |  |  |  |  | } | 
| 393 |  |  |  |  |  |  |  | 
| 394 | 0 |  |  |  |  |  | html_offset++; | 
| 395 |  |  |  |  |  |  | } | 
| 396 |  |  |  |  |  |  |  | 
| 397 | 0 |  |  |  |  |  | return html_offset; | 
| 398 |  |  |  |  |  |  | } | 
| 399 |  |  |  |  |  |  |  | 
| 400 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_escaped(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 401 |  |  |  |  |  |  | { | 
| 402 | 0 | 0 |  |  |  |  | while(html_offset < html_size) | 
| 403 |  |  |  |  |  |  | { | 
| 404 | 0 | 0 |  |  |  |  | if(html[html_offset] == '-') | 
| 405 |  |  |  |  |  |  | { | 
| 406 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_DASH; | 
| 407 | 0 |  |  |  |  |  | html_offset++; | 
| 408 | 0 |  |  |  |  |  | break; | 
| 409 |  |  |  |  |  |  | } | 
| 410 | 0 | 0 |  |  |  |  | else if(html[html_offset] == '<') | 
| 411 |  |  |  |  |  |  | { | 
| 412 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_LESS_THAN_SIGN; | 
| 413 | 0 |  |  |  |  |  | html_offset++; | 
| 414 | 0 |  |  |  |  |  | break; | 
| 415 |  |  |  |  |  |  | } | 
| 416 |  |  |  |  |  |  |  | 
| 417 | 0 |  |  |  |  |  | html_offset++; | 
| 418 |  |  |  |  |  |  | } | 
| 419 |  |  |  |  |  |  |  | 
| 420 | 0 |  |  |  |  |  | return html_offset; | 
| 421 |  |  |  |  |  |  | } | 
| 422 |  |  |  |  |  |  |  | 
| 423 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_escaped_dash(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 424 |  |  |  |  |  |  | { | 
| 425 | 0 | 0 |  |  |  |  | if(html[html_offset] == '-') { | 
| 426 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_DASH_DASH; | 
| 427 | 0 |  |  |  |  |  | html_offset++; | 
| 428 |  |  |  |  |  |  | } | 
| 429 | 0 | 0 |  |  |  |  | else if(html[html_offset] == '<') { | 
| 430 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_LESS_THAN_SIGN; | 
| 431 |  |  |  |  |  |  | } | 
| 432 | 0 | 0 |  |  |  |  | else if(html[html_offset] == '\0') { | 
| 433 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 434 |  |  |  |  |  |  | } | 
| 435 |  |  |  |  |  |  | else { | 
| 436 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 437 | 0 |  |  |  |  |  | html_offset++; | 
| 438 |  |  |  |  |  |  | } | 
| 439 |  |  |  |  |  |  |  | 
| 440 | 0 |  |  |  |  |  | return html_offset; | 
| 441 |  |  |  |  |  |  | } | 
| 442 |  |  |  |  |  |  |  | 
| 443 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_double_escape_start(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 444 |  |  |  |  |  |  | { | 
| 445 | 0 | 0 |  |  |  |  | while(html_offset < html_size) | 
| 446 |  |  |  |  |  |  | { | 
| 447 | 0 | 0 |  |  |  |  | if(myhtml_whithspace(html[html_offset], ==, ||) || html[html_offset] == '/' || html[html_offset] == '>') | 
| 448 |  |  |  |  |  |  | { | 
| 449 | 0 | 0 |  |  |  |  | if(((html_offset + tree->global_offset) - token_node->str.length) != 6) { | 
| 450 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 451 | 0 |  |  |  |  |  | html_offset++; | 
| 452 | 0 |  |  |  |  |  | break; | 
| 453 |  |  |  |  |  |  | } | 
| 454 |  |  |  |  |  |  |  | 
| 455 | 0 |  |  |  |  |  | size_t tmp_size = token_node->str.length; | 
| 456 | 0 |  |  |  |  |  | const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6); | 
| 457 |  |  |  |  |  |  |  | 
| 458 | 0 | 0 |  |  |  |  | if(mycore_strncasecmp(tem_name, "script", 6) == 0) { | 
| 459 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED; | 
| 460 |  |  |  |  |  |  | } | 
| 461 |  |  |  |  |  |  | else { | 
| 462 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 463 |  |  |  |  |  |  | } | 
| 464 |  |  |  |  |  |  |  | 
| 465 | 0 |  |  |  |  |  | html_offset++; | 
| 466 | 0 |  |  |  |  |  | break; | 
| 467 |  |  |  |  |  |  | } | 
| 468 | 0 | 0 |  |  |  |  | else if(myhtml_ascii_char_unless_cmp(html[html_offset])) | 
| 469 |  |  |  |  |  |  | { | 
| 470 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 471 | 0 |  |  |  |  |  | break; | 
| 472 |  |  |  |  |  |  | } | 
| 473 |  |  |  |  |  |  |  | 
| 474 | 0 |  |  |  |  |  | html_offset++; | 
| 475 |  |  |  |  |  |  | } | 
| 476 |  |  |  |  |  |  |  | 
| 477 | 0 |  |  |  |  |  | return html_offset; | 
| 478 |  |  |  |  |  |  | } | 
| 479 |  |  |  |  |  |  |  | 
| 480 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_double_escaped(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 481 |  |  |  |  |  |  | { | 
| 482 | 0 | 0 |  |  |  |  | while(html_offset < html_size) | 
| 483 |  |  |  |  |  |  | { | 
| 484 | 0 | 0 |  |  |  |  | if(html[html_offset] == '-') | 
| 485 |  |  |  |  |  |  | { | 
| 486 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED_DASH; | 
| 487 | 0 |  |  |  |  |  | html_offset++; | 
| 488 | 0 |  |  |  |  |  | break; | 
| 489 |  |  |  |  |  |  | } | 
| 490 | 0 | 0 |  |  |  |  | else if(html[html_offset] == '<') | 
| 491 |  |  |  |  |  |  | { | 
| 492 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED_LESS_THAN_SIGN; | 
| 493 | 0 |  |  |  |  |  | html_offset++; | 
| 494 | 0 |  |  |  |  |  | break; | 
| 495 |  |  |  |  |  |  | } | 
| 496 |  |  |  |  |  |  |  | 
| 497 | 0 |  |  |  |  |  | html_offset++; | 
| 498 |  |  |  |  |  |  | } | 
| 499 |  |  |  |  |  |  |  | 
| 500 | 0 |  |  |  |  |  | return html_offset; | 
| 501 |  |  |  |  |  |  | } | 
| 502 |  |  |  |  |  |  |  | 
| 503 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_double_escaped_dash(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 504 |  |  |  |  |  |  | { | 
| 505 | 0 | 0 |  |  |  |  | if(html[html_offset] == '-') | 
| 506 |  |  |  |  |  |  | { | 
| 507 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED_DASH_DASH; | 
| 508 |  |  |  |  |  |  | } | 
| 509 | 0 | 0 |  |  |  |  | else if(html[html_offset] == '<') | 
| 510 |  |  |  |  |  |  | { | 
| 511 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED_LESS_THAN_SIGN; | 
| 512 |  |  |  |  |  |  | } | 
| 513 |  |  |  |  |  |  | else { | 
| 514 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED; | 
| 515 |  |  |  |  |  |  | } | 
| 516 |  |  |  |  |  |  |  | 
| 517 | 0 |  |  |  |  |  | html_offset++; | 
| 518 |  |  |  |  |  |  |  | 
| 519 | 0 |  |  |  |  |  | return html_offset; | 
| 520 |  |  |  |  |  |  | } | 
| 521 |  |  |  |  |  |  |  | 
| 522 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_double_escaped_dash_dash(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 523 |  |  |  |  |  |  | { | 
| 524 | 0 | 0 |  |  |  |  | if(html[html_offset] == '-') { | 
| 525 | 0 |  |  |  |  |  | html_offset++; | 
| 526 | 0 |  |  |  |  |  | return html_offset; | 
| 527 |  |  |  |  |  |  | } | 
| 528 |  |  |  |  |  |  |  | 
| 529 | 0 | 0 |  |  |  |  | if(html[html_offset] == '<') | 
| 530 |  |  |  |  |  |  | { | 
| 531 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED_LESS_THAN_SIGN; | 
| 532 |  |  |  |  |  |  | } | 
| 533 | 0 | 0 |  |  |  |  | else if(html[html_offset] == '>') | 
| 534 |  |  |  |  |  |  | { | 
| 535 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA; | 
| 536 |  |  |  |  |  |  | } | 
| 537 |  |  |  |  |  |  | else { | 
| 538 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED; | 
| 539 |  |  |  |  |  |  | } | 
| 540 |  |  |  |  |  |  |  | 
| 541 | 0 |  |  |  |  |  | html_offset++; | 
| 542 |  |  |  |  |  |  |  | 
| 543 | 0 |  |  |  |  |  | return html_offset; | 
| 544 |  |  |  |  |  |  | } | 
| 545 |  |  |  |  |  |  |  | 
| 546 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_double_escaped_less_than_sign(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 547 |  |  |  |  |  |  | { | 
| 548 | 0 | 0 |  |  |  |  | if(html[html_offset] == '/') { | 
| 549 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPE_END; | 
| 550 | 0 |  |  |  |  |  | html_offset++; | 
| 551 |  |  |  |  |  |  |  | 
| 552 | 0 |  |  |  |  |  | token_node->str.length = (html_offset + tree->global_offset); | 
| 553 |  |  |  |  |  |  | } | 
| 554 |  |  |  |  |  |  | else { | 
| 555 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED; | 
| 556 |  |  |  |  |  |  | } | 
| 557 |  |  |  |  |  |  |  | 
| 558 | 0 |  |  |  |  |  | return html_offset; | 
| 559 |  |  |  |  |  |  | } | 
| 560 |  |  |  |  |  |  |  | 
| 561 | 0 |  |  |  |  |  | size_t myhtml_tokenizer_state_script_data_double_escape_end(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size) | 
| 562 |  |  |  |  |  |  | { | 
| 563 | 0 | 0 |  |  |  |  | while(html_offset < html_size) | 
| 564 |  |  |  |  |  |  | { | 
| 565 | 0 | 0 |  |  |  |  | if(myhtml_whithspace(html[html_offset], ==, ||) || html[html_offset] == '/' || html[html_offset] == '>') | 
| 566 |  |  |  |  |  |  | { | 
| 567 | 0 | 0 |  |  |  |  | if(((html_offset + tree->global_offset) - token_node->str.length) != 6) { | 
| 568 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED; | 
| 569 | 0 |  |  |  |  |  | html_offset++; | 
| 570 | 0 |  |  |  |  |  | break; | 
| 571 |  |  |  |  |  |  | } | 
| 572 |  |  |  |  |  |  |  | 
| 573 | 0 |  |  |  |  |  | size_t tmp_size = token_node->str.length; | 
| 574 | 0 |  |  |  |  |  | const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6); | 
| 575 |  |  |  |  |  |  |  | 
| 576 | 0 | 0 |  |  |  |  | if(mycore_strncasecmp(tem_name, "script", 6) == 0) { | 
| 577 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED; | 
| 578 |  |  |  |  |  |  | } | 
| 579 |  |  |  |  |  |  | else { | 
| 580 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED; | 
| 581 |  |  |  |  |  |  | } | 
| 582 |  |  |  |  |  |  |  | 
| 583 | 0 |  |  |  |  |  | html_offset++; | 
| 584 | 0 |  |  |  |  |  | break; | 
| 585 |  |  |  |  |  |  | } | 
| 586 | 0 | 0 |  |  |  |  | else if(myhtml_ascii_char_unless_cmp(html[html_offset])) | 
| 587 |  |  |  |  |  |  | { | 
| 588 | 0 |  |  |  |  |  | myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED; | 
| 589 | 0 |  |  |  |  |  | break; | 
| 590 |  |  |  |  |  |  | } | 
| 591 |  |  |  |  |  |  |  | 
| 592 | 0 |  |  |  |  |  | html_offset++; | 
| 593 |  |  |  |  |  |  | } | 
| 594 |  |  |  |  |  |  |  | 
| 595 | 0 |  |  |  |  |  | return html_offset; | 
| 596 |  |  |  |  |  |  | } | 
| 597 |  |  |  |  |  |  |  | 
| 598 |  |  |  |  |  |  |  | 
| 599 |  |  |  |  |  |  |  | 
| 600 |  |  |  |  |  |  |  |
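The listing above is the (entirely uncovered) report for MyHTML's `tokenizer_script.c`, which implements the HTML tokenizer's script-data states: plain script data, the `</` less-than-sign branch, the escaped and double-escaped variants, and the end-tag-name check that only accepts a close tag whose name is exactly 6 bytes long and matches "script" case-insensitively via `mycore_strncasecmp(tem_name, "script", 6)`. As a reading aid, here is a minimal standalone sketch of that end-tag check; it is not MyHTML code and not its API, and the names `find_script_data_end` and `eq_icase` are invented for this example.

```c
/*
 * Illustrative sketch only -- not MyHTML code and not its API. It mirrors the
 * check performed by the SCRIPT_DATA end-tag states above: script data ends at
 * a "</script" whose 6-byte name matches case-insensitively and is followed by
 * whitespace, '/' or '>'.
 */
#include <ctype.h>
#include <stddef.h>
#include <stdio.h>
#include <string.h>

/* Case-insensitive comparison of n bytes, standing in for mycore_strncasecmp(). */
static int eq_icase(const char *a, const char *b, size_t n)
{
    for (size_t i = 0; i < n; i++) {
        if (tolower((unsigned char)a[i]) != tolower((unsigned char)b[i]))
            return 0;
    }
    return 1;
}

/* Return the offset where the "</script" end tag starts,
 * or size if the buffer ends before a valid close tag is seen. */
static size_t find_script_data_end(const char *html, size_t size)
{
    size_t offset = 0;

    while (offset < size) {
        /* "</" may start the end tag (SCRIPT_DATA_LESS_THAN_SIGN / END_TAG_OPEN). */
        if (html[offset] == '<' && (offset + 1) < size && html[offset + 1] == '/') {
            size_t name_begin = offset + 2;
            size_t after      = name_begin + 6;

            /* The tag name must be exactly "script" (6 bytes, any case) and the
             * next byte must be whitespace, '/' or '>' (END_TAG_NAME). */
            if (after < size && eq_icase(&html[name_begin], "script", 6) &&
                (isspace((unsigned char)html[after]) ||
                 html[after] == '/' || html[after] == '>'))
            {
                return offset; /* script data stops right before "</script" */
            }
        }

        offset++;
    }

    return size;
}

int main(void)
{
    const char *data = "var a = '</scripx>'; </script>";
    size_t end = find_script_data_end(data, strlen(data));

    printf("script data length: %zu\n", end); /* prints 21 */
    return 0;
}
```

The real states cannot scan ahead like this because input may arrive in chunks, which is why the functions above record positions as `html_offset + tree->global_offset`, stash the candidate name's start in `token_node->str.length`, and re-read the 6 bytes through `myhtml_tree_incomming_buffer_make_data()` before comparing; the `- 8` passed to `myhtml_tokenizer_queue_create_text_node_if_need()` backs up over the 8 bytes of `</` plus the tag name so the emitted text token ends where the script data does.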