OLD | NEW |
1 /* | 1 /* |
2 * Section utility functions | 2 * Section utility functions |
3 * | 3 * |
4 * Copyright (C) 2001-2007 Peter Johnson | 4 * Copyright (C) 2001-2007 Peter Johnson |
5 * | 5 * |
6 * Redistribution and use in source and binary forms, with or without | 6 * Redistribution and use in source and binary forms, with or without |
7 * modification, are permitted provided that the following conditions | 7 * modification, are permitted provided that the following conditions |
8 * are met: | 8 * are met: |
9 * 1. Redistributions of source code must retain the above copyright | 9 * 1. Redistributions of source code must retain the above copyright |
10 * notice, this list of conditions and the following disclaimer. | 10 * notice, this list of conditions and the following disclaimer. |
11 * 2. Redistributions in binary form must reproduce the above copyright | 11 * 2. Redistributions in binary form must reproduce the above copyright |
12 * notice, this list of conditions and the following disclaimer in the | 12 * notice, this list of conditions and the following disclaimer in the |
13 * documentation and/or other materials provided with the distribution. | 13 * documentation and/or other materials provided with the distribution. |
14 * | 14 * |
15 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND OTHER CONTRIBUTORS ``AS IS'' | 15 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND OTHER CONTRIBUTORS ``AS IS'' |
16 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | 16 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
17 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE | 17 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE |
18 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS BE | 18 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR OTHER CONTRIBUTORS BE |
19 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | 19 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR |
20 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | 20 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF |
21 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | 21 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
22 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | 22 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
23 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | 23 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
24 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | 24 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
25 * POSSIBILITY OF SUCH DAMAGE. | 25 * POSSIBILITY OF SUCH DAMAGE. |
26 */ | 26 */ |
27 #include "util.h" | 27 #include "util.h" |
28 /*@unused@*/ RCSID("$Id: section.c 2109 2008-06-08 09:06:05Z peter $"); | 28 /*@unused@*/ RCSID("$Id: section.c 2310 2010-03-28 19:28:54Z peter $"); |
29 | 29 |
30 #include <limits.h> | 30 #include <limits.h> |
31 | 31 |
32 #include "libyasm-stdint.h" | 32 #include "libyasm-stdint.h" |
33 #include "coretype.h" | 33 #include "coretype.h" |
34 #include "hamt.h" | 34 #include "hamt.h" |
35 #include "valparam.h" | 35 #include "valparam.h" |
36 #include "assocdat.h" | 36 #include "assocdat.h" |
37 | 37 |
38 #include "linemap.h" | 38 #include "linemap.h" |
(...skipping 300 matching lines...)
339 STAILQ_INSERT_TAIL(&s->bcs, bc, link); | 339 STAILQ_INSERT_TAIL(&s->bcs, bc, link); |
340 | 340 |
341 /* Initialize relocs */ | 341 /* Initialize relocs */ |
342 STAILQ_INIT(&s->relocs); | 342 STAILQ_INIT(&s->relocs); |
343 s->destroy_reloc = NULL; | 343 s->destroy_reloc = NULL; |
344 | 344 |
345 s->code = code; | 345 s->code = code; |
346 s->res_only = res_only; | 346 s->res_only = res_only; |
347 s->def = 0; | 347 s->def = 0; |
348 | 348 |
| 349 /* Initialize object format specific data */ |
| 350 yasm_objfmt_init_new_section(s, line); |
| 351 |
349 *isnew = 1; | 352 *isnew = 1; |
350 return s; | 353 return s; |
351 } | 354 } |
352 /*@=onlytrans@*/ | 355 /*@=onlytrans@*/ |
353 | 356 |
354 int | 357 int |
355 yasm_object_directive(yasm_object *object, const char *name, | 358 yasm_object_directive(yasm_object *object, const char *name, |
356 const char *parser, yasm_valparamhead *valparams, | 359 const char *parser, yasm_valparamhead *valparams, |
357 yasm_valparamhead *objext_valparams, | 360 yasm_valparamhead *objext_valparams, |
358 unsigned long line) | 361 unsigned long line) |
(...skipping 498 matching lines...)
857 long pos_thres; | 860 long pos_thres; |
858 | 861 |
859 int id; | 862 int id; |
860 | 863 |
861 int active; | 864 int active; |
862 | 865 |
863 /* NULL-terminated array of spans that led to this span. Used only for | 866 /* Array of spans that led to this span (backtrace_size entries). Used |
864 * checking for circular references (cycles) with id=0 spans. | 867 * only for checking for circular references (cycles) with id=0 spans. |
865 */ | 868 */ |
866 yasm_span **backtrace; | 869 yasm_span **backtrace; |
| 870 int backtrace_size; |
867 | 871 |
868 /* First offset setter following this span's bytecode */ | 872 /* First offset setter following this span's bytecode */ |
869 yasm_offset_setter *os; | 873 yasm_offset_setter *os; |
870 }; | 874 }; |
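
The change above replaces the NULL-sentinel convention for the backtrace array with an explicit element count. A minimal sketch of what that means for code that searches the backtrace, as check_cycle does below; the function names are hypothetical and not part of libyasm:

    #include <stddef.h>

    typedef struct yasm_span yasm_span;    /* opaque for this sketch */

    /* Old convention: walk until the NULL sentinel at the end of the array. */
    static int
    bt_contains_null_terminated(yasm_span **backtrace, const yasm_span *s)
    {
        size_t i;
        for (i = 0; backtrace[i] != NULL; i++)
            if (backtrace[i] == s)
                return 1;
        return 0;
    }

    /* New convention: walk exactly backtrace_size entries; the array no
     * longer needs to be kept NULL-terminated while it grows.
     */
    static int
    bt_contains_sized(yasm_span **backtrace, int backtrace_size,
                      const yasm_span *s)
    {
        int i;
        for (i = 0; i < backtrace_size; i++)
            if (backtrace[i] == s)
                return 1;
        return 0;
    }
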
871 | 875 |
872 typedef struct optimize_data { | 876 typedef struct optimize_data { |
873 /*@reldef@*/ TAILQ_HEAD(, yasm_span) spans; | 877 /*@reldef@*/ TAILQ_HEAD(yasm_span_head, yasm_span) spans; |
874 /*@reldef@*/ STAILQ_HEAD(, yasm_span) QA, QB; | 878 /*@reldef@*/ STAILQ_HEAD(yasm_span_shead, yasm_span) QA, QB; |
875 /*@only@*/ IntervalTree *itree; | 879 /*@only@*/ IntervalTree *itree; |
876 /*@reldef@*/ STAILQ_HEAD(, yasm_offset_setter) offset_setters; | 880 /*@reldef@*/ STAILQ_HEAD(offset_setters_head, yasm_offset_setter) |
| 881 offset_setters; |
877 long len_diff; /* used only for optimize_term_expand */ | 882 long len_diff; /* used only for optimize_term_expand */ |
878 yasm_span *span; /* used only for check_cycle */ | 883 yasm_span *span; /* used only for check_cycle */ |
879 yasm_offset_setter *os; | 884 yasm_offset_setter *os; |
880 } optimize_data; | 885 } optimize_data; |
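
The NEW column also gives each queue head macro an explicit struct name. With BSD-style queue macros (libyasm carries a compatible copy in its headers), STAILQ_HEAD(name, type) defines struct name, so the head type can be referred to in other declarations; leaving the name empty yields only an anonymous struct usable at that one spot. A small sketch, assuming a system <sys/queue.h> and hypothetical names:

    #include <sys/queue.h>

    struct item {
        int value;
        STAILQ_ENTRY(item) link;
    };

    /* Naming the head defines "struct item_head", which can then be used as
     * a parameter or member type elsewhere; an empty name could not be.
     */
    STAILQ_HEAD(item_head, item);

    static int
    sum_items(struct item_head *head)
    {
        struct item *it;
        int total = 0;
        STAILQ_FOREACH(it, head, link)
            total += it->value;
        return total;
    }
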
881 | 886 |
882 static yasm_span * | 887 static yasm_span * |
883 create_span(yasm_bytecode *bc, int id, /*@null@*/ const yasm_value *value, | 888 create_span(yasm_bytecode *bc, int id, /*@null@*/ const yasm_value *value, |
884 long neg_thres, long pos_thres, yasm_offset_setter *os) | 889 long neg_thres, long pos_thres, yasm_offset_setter *os) |
885 { | 890 { |
886 yasm_span *span = yasm_xmalloc(sizeof(yasm_span)); | 891 yasm_span *span = yasm_xmalloc(sizeof(yasm_span)); |
887 | 892 |
888 span->bc = bc; | 893 span->bc = bc; |
889 if (value) | 894 if (value) |
890 yasm_value_init_copy(&span->depval, value); | 895 yasm_value_init_copy(&span->depval, value); |
891 else | 896 else |
892 yasm_value_initialize(&span->depval, NULL, 0); | 897 yasm_value_initialize(&span->depval, NULL, 0); |
893 span->rel_term = NULL; | 898 span->rel_term = NULL; |
894 span->terms = NULL; | 899 span->terms = NULL; |
895 span->items = NULL; | 900 span->items = NULL; |
896 span->num_terms = 0; | 901 span->num_terms = 0; |
897 span->cur_val = 0; | 902 span->cur_val = 0; |
898 span->new_val = 0; | 903 span->new_val = 0; |
899 span->neg_thres = neg_thres; | 904 span->neg_thres = neg_thres; |
900 span->pos_thres = pos_thres; | 905 span->pos_thres = pos_thres; |
901 span->id = id; | 906 span->id = id; |
902 span->active = 1; | 907 span->active = 1; |
903 span->backtrace = NULL; | 908 span->backtrace = NULL; |
| 909 span->backtrace_size = 0; |
904 span->os = os; | 910 span->os = os; |
905 | 911 |
906 return span; | 912 return span; |
907 } | 913 } |
908 | 914 |
909 static void | 915 static void |
910 optimize_add_span(void *add_span_data, yasm_bytecode *bc, int id, | 916 optimize_add_span(void *add_span_data, yasm_bytecode *bc, int id, |
911 const yasm_value *value, long neg_thres, long pos_thres) | 917 const yasm_value *value, long neg_thres, long pos_thres) |
912 { | 918 { |
913 optimize_data *optd = (optimize_data *)add_span_data; | 919 optimize_data *optd = (optimize_data *)add_span_data; |
(...skipping 239 matching lines...)
1153 | 1159 |
1154 IT_insert(itree, (long)low, (long)high, term); | 1160 IT_insert(itree, (long)low, (long)high, term); |
1155 } | 1161 } |
1156 | 1162 |
1157 static void | 1163 static void |
1158 check_cycle(IntervalTreeNode *node, void *d) | 1164 check_cycle(IntervalTreeNode *node, void *d) |
1159 { | 1165 { |
1160 optimize_data *optd = d; | 1166 optimize_data *optd = d; |
1161 yasm_span_term *term = node->data; | 1167 yasm_span_term *term = node->data; |
1162 yasm_span *depspan = term->span; | 1168 yasm_span *depspan = term->span; |
1163 int bt_size = 0, dep_bt_size = 0; | 1169 int i; |
| 1170 int depspan_bt_alloc; |
1164 | 1171 |
1165 /* Only check for cycles in id=0 spans */ | 1172 /* Only check for cycles in id=0 spans */ |
1166 if (depspan->id > 0) | 1173 if (depspan->id > 0) |
1167 return; | 1174 return; |
1168 | 1175 |
1169 /* Check for a circular reference by looking to see if this dependent | 1176 /* Check for a circular reference by looking to see if this dependent |
1170 * span is in our backtrace. | 1177 * span is in our backtrace. |
1171 */ | 1178 */ |
1172 if (optd->span->backtrace) { | 1179 if (optd->span->backtrace) { |
1173 yasm_span *s; | 1180 for (i=0; i<optd->span->backtrace_size; i++) { |
1174 while ((s = optd->span->backtrace[bt_size])) { | 1181 if (optd->span->backtrace[i] == depspan) |
1175 bt_size++; | |
1176 if (s == depspan) | |
1177 yasm_error_set(YASM_ERROR_VALUE, | 1182 yasm_error_set(YASM_ERROR_VALUE, |
1178 N_("circular reference detected")); | 1183 N_("circular reference detected")); |
1179 } | 1184 } |
1180 } | 1185 } |
1181 | 1186 |
1182 /* Add our complete backtrace and ourselves to backtrace of dependent | 1187 /* Add our complete backtrace and ourselves to backtrace of dependent |
1183 * span. | 1188 * span. |
1184 */ | 1189 */ |
1185 if (!depspan->backtrace) { | 1190 if (!depspan->backtrace) { |
1186 depspan->backtrace = yasm_xmalloc((bt_size+2)*sizeof(yasm_span *)); | 1191 depspan->backtrace = yasm_xmalloc((optd->span->backtrace_size+1)* |
1187 if (bt_size > 0) | 1192 sizeof(yasm_span *)); |
| 1193 if (optd->span->backtrace_size > 0) |
1188 memcpy(depspan->backtrace, optd->span->backtrace, | 1194 memcpy(depspan->backtrace, optd->span->backtrace, |
1189 bt_size*sizeof(yasm_span *)); | 1195 optd->span->backtrace_size*sizeof(yasm_span *)); |
1190 depspan->backtrace[bt_size] = optd->span; | 1196 depspan->backtrace[optd->span->backtrace_size] = optd->span; |
1191 depspan->backtrace[bt_size+1] = NULL; | 1197 depspan->backtrace_size = optd->span->backtrace_size+1; |
1192 return; | 1198 return; |
1193 } | 1199 } |
1194 | 1200 |
1195 while (depspan->backtrace[dep_bt_size]) | 1201 /* Add our complete backtrace, checking for duplicates */ |
1196 dep_bt_size++; | 1202 depspan_bt_alloc = depspan->backtrace_size; |
1197 depspan->backtrace = | 1203 for (i=0; i<optd->span->backtrace_size; i++) { |
1198 yasm_xrealloc(depspan->backtrace, | 1204 int present = 0; |
1199 (dep_bt_size+bt_size+2)*sizeof(yasm_span *)); | 1205 int j; |
1200 if (bt_size > 0) | 1206 for (j=0; j<depspan->backtrace_size; j++) { |
1201 memcpy(&depspan->backtrace[dep_bt_size], optd->span->backtrace, | 1207 if (optd->span->backtrace[i] == depspan->backtrace[j]) { |
1202 (bt_size-1)*sizeof(yasm_span *)); | 1208 present = 1; |
1203 depspan->backtrace[dep_bt_size+bt_size] = optd->span; | 1209 break; |
1204 depspan->backtrace[dep_bt_size+bt_size+1] = NULL; | 1210 } |
| 1211 } |
| 1212 if (present) |
| 1213 continue; |
| 1214 /* Not already in array; add it. */ |
| 1215 if (depspan->backtrace_size >= depspan_bt_alloc) |
| 1216 { |
| 1217 depspan_bt_alloc *= 2; |
| 1218 depspan->backtrace = |
| 1219 yasm_xrealloc(depspan->backtrace, |
| 1220 depspan_bt_alloc*sizeof(yasm_span *)); |
| 1221 } |
| 1222 depspan->backtrace[depspan->backtrace_size] = optd->span->backtrace[i]; |
| 1223 depspan->backtrace_size++; |
| 1224 } |
| 1225 |
| 1226 /* Add ourselves. */ |
| 1227 if (depspan->backtrace_size >= depspan_bt_alloc) |
| 1228 { |
| 1229 depspan_bt_alloc++; |
| 1230 depspan->backtrace = |
| 1231 yasm_xrealloc(depspan->backtrace, |
| 1232 depspan_bt_alloc*sizeof(yasm_span *)); |
| 1233 } |
| 1234 depspan->backtrace[depspan->backtrace_size] = optd->span; |
| 1235 depspan->backtrace_size++; |
1205 } | 1236 } |
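
The rewritten check_cycle grows depspan's backtrace with a linear duplicate check and an amortized-doubling yasm_xrealloc. A minimal standalone sketch of that append pattern, with hypothetical names and plain realloc in place of yasm_xrealloc (which in libyasm aborts on allocation failure):

    #include <stdlib.h>

    typedef struct yasm_span yasm_span;    /* opaque for this sketch */

    typedef struct span_vec {
        yasm_span **items;
        int size;     /* entries in use, cf. backtrace_size */
        int alloc;    /* entries allocated, cf. depspan_bt_alloc */
    } span_vec;

    /* Append s unless it is already present, doubling capacity as needed. */
    static void
    span_vec_add_unique(span_vec *v, yasm_span *s)
    {
        int i;
        for (i = 0; i < v->size; i++)
            if (v->items[i] == s)
                return;                /* already present; nothing to do */
        if (v->size >= v->alloc) {
            v->alloc = v->alloc ? v->alloc*2 : 4;
            v->items = realloc(v->items, v->alloc*sizeof(yasm_span *));
            /* allocation failure handling omitted in this sketch */
        }
        v->items[v->size++] = s;
    }
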
1206 | 1237 |
1207 static void | 1238 static void |
1208 optimize_term_expand(IntervalTreeNode *node, void *d) | 1239 optimize_term_expand(IntervalTreeNode *node, void *d) |
1209 { | 1240 { |
1210 optimize_data *optd = d; | 1241 optimize_data *optd = d; |
1211 yasm_span_term *term = node->data; | 1242 yasm_span_term *term = node->data; |
1212 yasm_span *span = term->span; | 1243 yasm_span *span = term->span; |
1213 long len_diff = optd->len_diff; | 1244 long len_diff = optd->len_diff; |
1214 long precbc_index, precbc2_index; | 1245 long precbc_index, precbc2_index; |
(...skipping 326 matching lines...)
1541 | 1572 |
1542 if (saw_error) { | 1573 if (saw_error) { |
1543 optimize_cleanup(&optd); | 1574 optimize_cleanup(&optd); |
1544 return; | 1575 return; |
1545 } | 1576 } |
1546 | 1577 |
1547 /* Step 3 */ | 1578 /* Step 3 */ |
1548 update_all_bc_offsets(object, errwarns); | 1579 update_all_bc_offsets(object, errwarns); |
1549 optimize_cleanup(&optd); | 1580 optimize_cleanup(&optd); |
1550 } | 1581 } |