store output section offsets

master
Ales Katona 4 years ago
parent a6eb022b91
commit 16255e6036
Signed by: almindor
GPG Key ID: 2F773149BF38B48F

@@ -13,22 +13,52 @@ pub enum SegmentType {
     Bss,
 }
+pub struct SegmentSection {
+    si: SectionInfo,
+    output_offset: usize,
+}
+impl SegmentSection {
+    // TODO: refactor this into newtype swap with compile-time checking
+    pub fn section_info(&mut self, output_offset: usize) -> Result<&SectionInfo, Error> {
+        if self.output_offset > 0 {
+            Err(Error::InvalidSectionIndex) // duplicate access??
+        } else {
+            self.output_offset = output_offset;
+            Ok(&mut self.si)
+        }
+    }
+}
+impl From<SectionInfo> for SegmentSection {
+    fn from(si: SectionInfo) -> Self {
+        Self {
+            si,
+            output_offset: 0,
+        }
+    }
+}
 #[derive(Default)]
 pub struct SegmentSections {
-    sections_info: Vec<SectionInfo>,
+    sections_info: Vec<SegmentSection>,
     data_size: u64,
 }
 impl SegmentSections {
     pub fn push(&mut self, si: SectionInfo) {
         self.data_size += si.data_size;
-        self.sections_info.push(si);
+        self.sections_info.push(si.into());
     }
-    pub fn iter(&self) -> impl Iterator<Item = &SectionInfo> {
+    pub fn iter(&self) -> impl Iterator<Item = &SegmentSection> {
         self.sections_info.iter()
     }
+    pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut SegmentSection> {
+        self.sections_info.iter_mut()
+    }
     pub fn data_size(&self) -> u64 {
         self.data_size
     }
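
Aside, not part of the commit: the TODO above hints at replacing the runtime `output_offset > 0` check with a newtype swap verified at compile time. A minimal, self-contained sketch of that idea follows; `UnplacedSection`, `PlacedSection`, and the stub `SectionInfo` are illustrative names, not types from this repository.

    // Sketch only: `SectionInfo` is a stub standing in for the real type.
    pub struct SectionInfo {
        pub file_size: u64,
    }

    // A section that has not been placed in the output yet.
    pub struct UnplacedSection {
        si: SectionInfo,
    }

    // A section whose output offset has been recorded exactly once.
    pub struct PlacedSection {
        si: SectionInfo,
        output_offset: usize,
    }

    impl UnplacedSection {
        // Consuming `self` makes a second placement a type error
        // instead of a runtime `Err(Error::InvalidSectionIndex)`.
        pub fn place(self, output_offset: usize) -> PlacedSection {
            PlacedSection {
                si: self.si,
                output_offset,
            }
        }
    }

    impl PlacedSection {
        pub fn section_info(&self) -> &SectionInfo {
            &self.si
        }

        pub fn output_offset(&self) -> usize {
            self.output_offset
        }
    }

    fn main() {
        let unplaced = UnplacedSection {
            si: SectionInfo { file_size: 42 },
        };
        let placed = unplaced.place(0x1000);
        println!("{} bytes at {:#x}", placed.section_info().file_size, placed.output_offset());
        // `unplaced.place(0x2000)` would not compile here: `unplaced` was moved.
    }
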
@@ -97,33 +127,30 @@ impl Loadable {
         text.chain(rodata).chain(data).chain(bss)
     }
-    pub fn program_bytes<'l, R: Relocatable>(
-        &'l self,
-        objects: &'l [R],
-    ) -> impl Iterator<Item = Result<&'l [u8], Error>> {
-        let text_iter = self.text.iter();
-        let rodata_iter = self.rodata.iter();
+    pub fn program_sections(&mut self) -> impl Iterator<Item = &mut SegmentSection> {
+        let text_iter = self.text.iter_mut();
+        let rodata_iter = self.rodata.iter_mut();
-        let data1 = text_iter.filter_map(move |si| match si.file_size {
+        let data1 = text_iter.filter_map(move |ss| match ss.si.file_size {
             0 => None,
-            _ => Some(objects[si.object_index].bytes(si.offset, si.file_size)),
+            _ => Some(ss),
         });
-        let data2 = rodata_iter.filter_map(move |si| match si.data_size {
+        let data2 = rodata_iter.filter_map(move |ss| match ss.si.data_size {
             0 => None,
-            _ => Some(objects[si.object_index].bytes(si.offset, si.file_size)),
+            _ => Some(ss),
         });
         data1.chain(data2)
     }
-    pub fn data_bytes<'l, R: Relocatable>(
-        &'l self,
-        objects: &'l [R],
-    ) -> impl Iterator<Item = Result<&'l [u8], Error>> {
-        let iter = self.data.iter().filter_map(move |si| match si.file_size {
-            0 => None,
-            _ => Some(objects[si.object_index].bytes(si.offset, si.file_size)),
-        });
+    pub fn data_sections(&mut self) -> impl Iterator<Item = &mut SegmentSection> {
+        let iter = self
+            .data
+            .iter_mut()
+            .filter_map(move |ss| match ss.si.file_size {
+                0 => None,
+                _ => Some(ss),
+            });
         iter
     }
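
Aside, not part of the commit: `program_sections` and `data_sections` now hand out mutable `SegmentSection` handles instead of pre-resolved byte slices, so the output writer can record where each section lands. A self-contained sketch of that iterator shape; `Section` and `Segments` are stand-in names, not types from this repository.

    // Stand-in for SegmentSection/SectionInfo; sketch only.
    struct Section {
        file_size: u64,
    }

    struct Segments {
        text: Vec<Section>,
        rodata: Vec<Section>,
    }

    impl Segments {
        // Same shape as `program_sections`: chain two Vecs via iter_mut()
        // and skip sections with nothing to write.
        fn sections(&mut self) -> impl Iterator<Item = &mut Section> {
            self.text
                .iter_mut()
                .chain(self.rodata.iter_mut())
                .filter(|s| s.file_size > 0)
        }
    }

    fn main() {
        let mut segs = Segments {
            text: vec![Section { file_size: 8 }],
            rodata: vec![Section { file_size: 0 }, Section { file_size: 16 }],
        };
        // The zero-sized rodata section is filtered out.
        assert_eq!(segs.sections().count(), 2);
    }
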
@@ -134,8 +161,8 @@ impl Loadable {
         let rodata_iter = self.rodata.iter();
         let mut result = 0u64;
-        for section in text_iter.chain(rodata_iter) {
-            result += section.data_size
+        for ss in text_iter.chain(rodata_iter) {
+            result += ss.si.data_size
         }
         result
@@ -144,8 +171,8 @@ impl Loadable {
     // data
     pub fn data_size(&self) -> u64 {
         let mut result = 0u64;
-        for section in self.data.iter() {
-            result += section.data_size
+        for ss in self.data.iter() {
+            result += ss.si.data_size
         }
         result
@@ -154,8 +181,8 @@ impl Loadable {
     // bss
     pub fn bss_size(&self) -> u64 {
         let mut result = 0u64;
-        for section in self.bss.iter() {
-            result += section.data_size
+        for ss in self.bss.iter() {
+            result += ss.si.data_size
         }
         result

@@ -7,5 +7,5 @@ pub trait Output<R>
 where
     R: Relocatable,
 {
-    fn finalize(self, objects: &[R], loadable: &Loadable) -> Result<PathBuf, Error>;
+    fn finalize(self, objects: &[R], loadable: &mut Loadable) -> Result<PathBuf, Error>;
 }

@@ -7,7 +7,7 @@ use elf_utilities::{
 };
 use crate::{
-    common::{expand_path, pad_to_next_page, Loadable, Output, SegmentType},
+    common::{expand_path, pad_to_next_page, Loadable, Output, Relocatable, SegmentType},
     error::Error,
 };
@@ -58,7 +58,11 @@ impl ElfOutput {
 }
 impl Output<ElfObject> for ElfOutput {
-    fn finalize(mut self, objects: &[ElfObject], loadable: &Loadable) -> Result<PathBuf, Error> {
+    fn finalize(
+        mut self,
+        objects: &[ElfObject],
+        loadable: &mut Loadable,
+    ) -> Result<PathBuf, Error> {
         const EHS: u64 = size_of::<Ehdr64>() as u64;
         const PHS: u16 = size_of::<Phdr64>() as u16;
         const SHS: u16 = size_of::<Shdr64>() as u16;
@@ -112,14 +116,27 @@ impl Output<ElfObject> for ElfOutput {
         offset += pad_to_next_page(&mut self.writer, offset)?;
         eprintln!("Prog start: {}", offset);
         // write section/segment data
-        for bytes in loadable.program_bytes(objects) {
-            offset += self.writer.write(bytes?)?;
+        for ss in loadable.program_sections() {
+            let si = ss.section_info(offset)?;
+            let object = objects
+                .get(si.object_index)
+                .ok_or(Error::InvalidObjectIndex)?;
+            let bytes = object.bytes(si.offset, si.file_size)?;
+            offset += self.writer.write(bytes)?;
         }
+        // for bytes in loadable.program_bytes(objects) {
+        //     offset += self.writer.write(bytes?)?;
+        // }
         offset += pad_to_next_page(&mut self.writer, offset)?;
         eprintln!("Data start: {}", offset);
-        for bytes in loadable.data_bytes(objects) {
-            offset += self.writer.write(bytes?)?;
+        for ss in loadable.data_sections() {
+            let si = ss.section_info(offset)?;
+            let object = objects
+                .get(si.object_index)
+                .ok_or(Error::InvalidObjectIndex)?;
+            let bytes = object.bytes(si.offset, si.file_size)?;
+            offset += self.writer.write(bytes)?;
         }
         self.writer.flush()?;
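
Aside, not part of the commit: the shape of the new write loops above, boiled down to stand-in types — walk the mutable section handles, record the running output offset in each, then copy that section's bytes from its owning object. `Section` and `Object` here are illustrative stand-ins for `SegmentSection`/`SectionInfo` and the `Relocatable` objects.

    use std::io::{Cursor, Write};

    // Stand-ins; sketch only.
    struct Section {
        object_index: usize,
        offset: usize,
        file_size: usize,
        output_offset: usize,
    }

    struct Object {
        data: Vec<u8>,
    }

    impl Object {
        fn bytes(&self, offset: usize, len: usize) -> &[u8] {
            &self.data[offset..offset + len]
        }
    }

    fn main() -> std::io::Result<()> {
        let objects = vec![
            Object { data: vec![0xAA; 32] },
            Object { data: vec![0xBB; 32] },
        ];
        let mut sections = vec![
            Section { object_index: 0, offset: 0, file_size: 16, output_offset: 0 },
            Section { object_index: 1, offset: 8, file_size: 8, output_offset: 0 },
        ];

        let mut writer = Cursor::new(Vec::new());
        let mut offset = 0usize;

        for s in sections.iter_mut() {
            // Record where this section lands in the output file...
            s.output_offset = offset;
            // ...then copy its bytes from the object that owns it.
            let obj = &objects[s.object_index];
            offset += writer.write(obj.bytes(s.offset, s.file_size))?;
        }

        assert_eq!(offset, 24);
        assert_eq!(sections[1].output_offset, 16);
        Ok(())
    }
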

@@ -45,7 +45,7 @@ where
         self.process_symbols()?;
         self.loadable.set_start_offset(4096); // TODO: get from .start symbol location
-        self.output.finalize(&self.objects, &self.loadable)
+        self.output.finalize(&self.objects, &mut self.loadable)
     }
     fn process_symbols(&mut self) -> Result<(), Error> {
