/*
 * We adjust the linker script to place all of the state that needs to
 * persist across fuzzing runs into a contiguous section of memory. Then, it is
 * easy to re-map the counter-related memory as shared.
 */

/*
 * Start of the persistent fuzzing region: collect the SanitizerCoverage
 * inline counters between explicit start/stop symbols so the runtime can
 * find them, and pin __FUZZ_COUNTERS_START at the (page-aligned) beginning
 * of the region that gets re-mapped as shared.
 */
SECTIONS
{
	.data.fuzz_start : ALIGN(4K)
	{
		/* First byte of the shared/persistent counter region. */
		__FUZZ_COUNTERS_START = .;

		/* SanitizerCoverage counters, bracketed for the runtime. */
		__start___sancov_cntrs = .;
		*(_*sancov_cntrs);
		__stop___sancov_cntrs = .;

		/* Lowest stack counter */
		*(__sancov_lowest_stack);
	}
}
INSERT AFTER .data;
/*
 * Middle of the persistent fuzzing region: LLVM profile counters and the
 * libFuzzer TracePC object, kept inside the same contiguous area so they
 * persist across runs along with the coverage counters.
 */
SECTIONS
{
	.data.fuzz_ordered :
	{
		/*
		 * Coverage counters. They're not necessary for fuzzing, but are useful
		 * for analyzing the fuzzing performance
		 */
		__start___llvm_prf_cnts = .;
		*(*llvm_prf_cnts);
		__stop___llvm_prf_cnts = .;

		/* Internal Libfuzzer TracePC object which contains the ValueProfileMap */
		FuzzerTracePC*(.bss*);
		/*
		 * In case the above line fails, explicitly specify the (mangled) name of
		 * the object we care about
		 */
		*(.bss._ZN6fuzzer3TPCE);
	}
}
INSERT AFTER .data.fuzz_start;
/*
 * End of the persistent fuzzing region: __FUZZ_COUNTERS_END marks the
 * (page-aligned) upper bound of the memory that gets re-mapped as shared.
 */
SECTIONS
{
	.data.fuzz_end : ALIGN(4K)
	{
		__FUZZ_COUNTERS_END = .;
	}
}

/*
 * Don't overwrite the SECTIONS in the default linker script. Instead insert the
 * above into the default script
 */
INSERT AFTER .data.fuzz_ordered;