#![allow(unused_variables)]
use crate::common::*;
use crate::lef_ast::*;
pub use crate::stream_parser::LefDefParseError;
use crate::stream_parser::*;
use itertools::PeekingNext;
use libreda_db::prelude as db;
use libreda_stream_parser::{tokenize, Tokenized};
use std::io::Read;
/// Reads a step pattern of the form `DO numX BY numY STEP spaceX spaceY`.
///
/// The leading `DO` keyword is consumed by this function.
fn read_step_pattern<I>(tk: &mut Tokenized<I, LefDefLexer>) -> Result<StepPattern, LefDefParseError>
where
    I: Iterator<Item = char> + PeekingNext,
{
    tk.expect_str("DO")?;
    let repeat_x: u64 = tk.take_and_parse()?;
    tk.expect_str("BY")?;
    let repeat_y: u64 = tk.take_and_parse()?;
    tk.expect_str("STEP")?;
    let step_x: f64 = tk.take_and_parse()?;
    let step_y: f64 = tk.take_and_parse()?;
    Ok(StepPattern {
        num_x: repeat_x,
        num_y: repeat_y,
        space_x: step_x,
        space_y: step_y,
    })
}
/// Tries to read one `LAYER <name> ... ` geometry group as used inside
/// `PORT` and `OBS` sections.
///
/// Returns `Ok(None)` if the next token is not `LAYER`. Otherwise parses the
/// optional `EXCEPTPGNET` / `SPACING` / `DESIGNRULEWIDTH` / `WIDTH`
/// qualifiers followed by a sequence of `PATH` / `RECT` / `POLYGON`
/// geometries. `VIA` geometries are recognized but not implemented and
/// produce an error.
fn read_layer_geometries<I>(
    tk: &mut Tokenized<I, LefDefLexer>,
) -> Result<Option<LayerGeometries>, LefDefParseError>
where
    I: Iterator<Item = char> + PeekingNext,
{
    if tk.test_str("LAYER")? {
        let mut layer = LayerGeometries::default();
        let layer_name = tk.take_str()?;
        layer.layer_name = layer_name;
        layer.except_pg_net = tk.test_str("EXCEPTPGNET")?;
        // At most one of SPACING / DESIGNRULEWIDTH may follow the layer name.
        let spacing_or_min_design_rule_width = if tk.test_str("SPACING")? {
            let min_spacing: f64 = tk.take_and_parse()?;
            Some(SpacingOrDesignRuleWidth::MinSpacing(min_spacing))
        } else if tk.test_str("DESIGNRULEWIDTH")? {
            let effective_width: f64 = tk.take_and_parse()?;
            Some(SpacingOrDesignRuleWidth::DesignRuleWidth(effective_width))
        } else {
            None
        };
        tk.expect_str(";")?;
        layer.spacing_or_designrule_width = spacing_or_min_design_rule_width;
        // Optional default width, required by subsequent PATH geometries.
        if tk.test_str("WIDTH")? {
            let width: f64 = tk.take_and_parse()?;
            tk.expect_str(";")?;
            layer.width = Some(width);
        }
        loop {
            if tk.test_str("PATH")? {
                let iterate = tk.test_str("ITERATE")?;
                let mut points = Vec::new();
                // Points run until either a DO step pattern or the closing ';'.
                while !tk.peeking_test_str("DO")? && !tk.peeking_test_str(";")? {
                    points.push(read_point::<f64, _>(tk)?.into())
                }
                let step_pattern = if iterate {
                    Some(read_step_pattern(tk)?)
                } else {
                    None
                };
                tk.expect_str(";")?;
                // A PATH needs a width. Return a parse error instead of
                // panicking when no WIDTH statement preceded it.
                let width = layer.width.ok_or(LefDefParseError::Other(
                    "PATH requires a preceding WIDTH statement in the LAYER geometries.",
                ))?;
                layer.geometries.push(Geometry {
                    step_pattern,
                    shape: Shape::Path(width, points),
                })
            } else if tk.test_str("RECT")? {
                let iterate = tk.test_str("ITERATE")?;
                let (p1, p2) = read_rect(tk)?;
                let step_pattern = if iterate {
                    Some(read_step_pattern(tk)?)
                } else {
                    None
                };
                tk.expect_str(";")?;
                layer.geometries.push(Geometry {
                    step_pattern,
                    shape: Shape::Rect(p1.into(), p2.into()),
                })
            } else if tk.test_str("POLYGON")? {
                let iterate = tk.test_str("ITERATE")?;
                let mut points = Vec::new();
                while !tk.peeking_test_str("DO")? && !tk.peeking_test_str(";")? {
                    points.push(read_point(tk)?.into())
                }
                let step_pattern = if iterate {
                    Some(read_step_pattern(tk)?)
                } else {
                    None
                };
                tk.expect_str(";")?;
                layer.geometries.push(Geometry {
                    step_pattern,
                    shape: Shape::Polygon(points),
                })
            } else if tk.test_str("VIA")? {
                // The tokens are consumed so that the error is raised with
                // the parser in a defined position.
                let iterate = tk.test_str("ITERATE")?;
                let point: (f64, f64) = read_point(tk)?;
                let via_name = tk.take_str()?;
                if iterate {
                    let step_pattern = read_step_pattern(tk)?;
                }
                tk.expect_str(";")?;
                return Err(LefDefParseError::NotImplemented(
                    "VIA inside layer geometry.",
                ));
            } else {
                break;
            }
        }
        Ok(Some(layer))
    } else {
        Ok(None)
    }
}
/// Tries to read a `PIN <name> ... END <name>` section of a MACRO.
///
/// Returns `Ok(None)` if the next token is not `PIN`. NETEXPR and any
/// unrecognized (assumed ANTENNA*) statements are skipped with a warning;
/// PROPERTY statements are skipped silently.
fn try_read_pin<I>(tk: &mut Tokenized<I, LefDefLexer>) -> Result<Option<MacroPin>, LefDefParseError>
where
    I: Iterator<Item = char> + PeekingNext,
{
    if tk.test_str("PIN")? {
        let mut pin = MacroPin::default();
        let pin_name = tk.take_str()?;
        pin.name = pin_name;
        loop {
            if tk.test_str("END")? {
                // The pin section closes with `END <pin name>`.
                tk.expect_str(pin.name.as_str())?;
                break;
            } else if tk.test_str("TAPERRULE")? {
                let rule_name = tk.take_str()?;
                tk.expect_str(";")?;
                pin.taper_rule = Some(rule_name);
            } else if tk.test_str("DIRECTION")? {
                let direction: PinDirection = tk.take_and_parse()?;
                // An OUTPUT direction may carry an optional TRISTATE flag.
                let direction = match direction {
                    PinDirection::Output(_) => {
                        let tristate = tk.test_str("TRISTATE")?;
                        PinDirection::Output(tristate)
                    }
                    d => d,
                };
                tk.expect_str(";")?;
                pin.direction = Some(direction);
            } else if tk.test_str("USE")? {
                let signal_use: SignalUse = tk.take_and_parse()?;
                tk.expect_str(";")?;
                pin.signal_use = Some(signal_use);
            } else if tk.test_str("NETEXPR")? {
                // Expression is consumed but discarded.
                let net_expr = tk.take_str()?;
                tk.expect_str(";")?;
                log::warn!("Skipping NETEXPR of the MACRO PIN (not implemented).");
            } else if tk.test_str("SUPPLYSENSITIVITY")? {
                let pin_name = tk.take_str()?;
                tk.expect_str(";")?;
                pin.supply_sensitivity = Some(pin_name);
            } else if tk.test_str("GROUNDSENSITIVITY")? {
                let pin_name = tk.take_str()?;
                tk.expect_str(";")?;
                pin.ground_sensitivity = Some(pin_name);
            } else if tk.test_str("SHAPE")? {
                let shape: PinShape = tk.take_and_parse()?;
                tk.expect_str(";")?;
                pin.shape_type = Some(shape);
            } else if tk.test_str("MUSTJOIN")? {
                let pin_name = tk.take_str()?;
                tk.expect_str(";")?;
                pin.must_join = Some(pin_name);
            } else if tk.test_str("PORT")? {
                // PORT [CLASS ... ;] { LAYER geometry groups } END
                let mut port = MacroPinPort::default();
                if tk.test_str("CLASS")? {
                    let class: PortClass = tk.take_and_parse()?;
                    tk.expect_str(";")?;
                    port.class = Some(class);
                }
                while let Some(geo) = read_layer_geometries(tk)? {
                    port.geometries.push(geo);
                }
                tk.expect_str("END")?;
                pin.ports.push(port);
            } else if tk.test_str("PROPERTY")? {
                tk.skip_until_str(";")?;
            } else {
                // Fallback: assume an ANTENNA* statement and skip it whole.
                log::warn!("Skipping ANTENNA* properties of the MACRO PIN (not implemented).");
                tk.skip_until_str(";")?;
            }
        }
        Ok(Some(pin))
    } else {
        Ok(None)
    }
}
/// Tries to read an `OBS ... END` obstruction section of a MACRO.
///
/// Returns `Ok(None)` if the next token is not `OBS`; otherwise collects the
/// contained LAYER geometry groups until the terminating `END`.
fn try_read_obs<I>(
    tk: &mut Tokenized<I, LefDefLexer>,
) -> Result<Option<Vec<LayerGeometries>>, LefDefParseError>
where
    I: Iterator<Item = char> + PeekingNext,
{
    if !tk.test_str("OBS")? {
        return Ok(None);
    }
    let mut obstructions = Vec::new();
    while let Some(geometry_group) = read_layer_geometries(tk)? {
        obstructions.push(geometry_group);
    }
    tk.expect_str("END")?;
    Ok(Some(obstructions))
}
/// Reads a LEF library from a byte stream by treating each byte as a char.
///
/// NOTE(review): each byte read is `unwrap()`ed, so an I/O error while
/// reading panics instead of being reported through the `Result` — consider
/// propagating it once the error type has an I/O variant. The `as char`
/// cast also assumes ASCII input (bytes >= 0x80 map to Latin-1 code
/// points); presumably fine for LEF files — confirm.
pub fn read_lef_bytes<R: Read>(reader: &mut R) -> Result<LEF, LefDefParseError> {
    read_lef_chars(reader.bytes().map(|b| b.unwrap() as char))
}
pub fn read_lef_chars<I>(chars: I) -> Result<LEF, LefDefParseError>
where
I: Iterator<Item = char>,
{
let mut line_num = 0;
let mut char_num = 0; let line_count = chars.inspect(|&c| {
char_num += 1;
if c == '\n' {
line_num += 1;
char_num = 0;
}
});
let result = read_lef_impl(line_count);
if result.is_err() {
log::error!("LEF error on line: {} (at {})", line_num, char_num);
}
result
}
/// Core LEF parser: dispatches on top-level statements until `END LIBRARY`
/// and assembles the [`LEF`] library structure.
///
/// Recognized statements: VERSION, BUSBITCHARS, NAMESCASESENSITIVE,
/// DIVIDERCHAR, UNITS, CLEARANCEMEASURE, PROPERTYDEFINITIONS,
/// MANUFACTURINGGRID, MAXVIASTACK, NONDEFAULTRULE (unimplemented), LAYER,
/// VIA, VIARULE GENERATE, SPACING, SITE and MACRO. Any other top-level
/// token aborts parsing with an error.
fn read_lef_impl<I>(chars: I) -> Result<LEF, LefDefParseError>
where
    I: Iterator<Item = char>,
{
    let mut library = LEF::default();
    let mut tk = tokenize(chars, LefDefLexer {});
    // Defaults; may be overwritten by BUSBITCHARS / DIVIDERCHAR below.
    library.busbitchars = ('[', ']');
    library.dividerchar = '/';
    // Load the first token before entering the dispatch loop.
    tk.advance();
    loop {
        if tk.test_str("END")? {
            // `END LIBRARY` terminates the file. Note: an `END` followed by
            // anything else is consumed here without further effect.
            if tk.test_str("LIBRARY")? {
                break;
            }
        } else if tk.test_str("VERSION")? {
            let version = tk.take_str()?;
            tk.expect_str(";")?;
            library.version = Some(version);
        } else if tk.test_str("BUSBITCHARS")? {
            // Expects a two-character string such as "[]".
            let chars = tk.take_str()?;
            if chars.len() == 2 {
                let start = chars.chars().nth(0).unwrap();
                let end = chars.chars().nth(1).unwrap();
                if start == end {
                    log::error!("Bus bit chars cannot be equal: {}", start);
                    return Err(LefDefParseError::IllegalBusBitChars(start, end));
                }
                log::debug!("Bus bit chars: '{}' '{}'", start, end);
                library.busbitchars = (start, end);
            } else {
                return Err(LefDefParseError::InvalidCharacter);
            }
            tk.expect_str(";")?;
        } else if tk.test_str("NAMESCASESENSITIVE")? {
            // The ON/OFF value is consumed but ignored.
            tk.take_str()?;
            tk.expect_str(";")?;
        } else if tk.test_str("DIVIDERCHAR")? {
            // Expects a single-character string such as "/".
            let divchar = tk.take_str()?;
            if divchar.len() == 1 {
                library.dividerchar = divchar.chars().nth(0).unwrap();
                log::debug!("Divider char: '{}'", library.dividerchar);
            } else {
                return Err(LefDefParseError::InvalidCharacter);
            }
            tk.expect_str(";")?;
        } else if tk.test_str("UNITS")? {
            library.technology.units = read_units(&mut tk, library.technology.units)?;
        } else if tk.test_str("CLEARANCEMEASURE")? {
            library.technology.clearance_measure = tk.take_and_parse()?;
            tk.expect_str(";")?;
        } else if tk.test_str("PROPERTYDEFINITIONS")? {
            loop {
                if tk.test_str("END")? {
                    tk.expect_str("PROPERTYDEFINITIONS")?;
                    break;
                } else {
                    // objectType propName propType [RANGE a b] [value] ;
                    let object_type = tk.take_str()?;
                    let prop_name = tk.take_str()?;
                    let prop_type = tk.take_str()?;
                    if tk.test_str("RANGE")? {
                        // Range bounds are consumed but discarded.
                        let range_start = tk.take_str()?;
                        let range_end = tk.take_str()?;
                    }
                    if !(tk.test_str(";")?) {
                        // Optional default value, consumed but discarded.
                        let default_value = tk.take_str()?;
                        tk.expect_str(";")?;
                    }
                    // Only the property name is recorded; type/value
                    // information is currently dropped.
                    library
                        .technology
                        .property_definitions
                        .insert(prop_name, ());
                }
            }
        } else if tk.test_str("MANUFACTURINGGRID")? {
            let grid: f64 = tk.take_and_parse()?;
            if grid < 0. {
                // Tolerated with a warning instead of a hard failure.
                log::warn!("MANUFACTURINGGRID must be positive ({}).", grid);
            }
            library.technology.manufacturing_grid = Some(grid);
            tk.expect_str(";")?;
        } else if tk.test_str("MAXVIASTACK")? {
            let value: u64 = tk.take_and_parse()?;
            let range = if tk.test_str("RANGE")? {
                let bottom_layer = tk.take_str()?;
                let top_layer = tk.take_str()?;
                Some((bottom_layer, top_layer))
            } else {
                None
            };
            library.technology.max_via_stack = Some((value, range));
            tk.expect_str(";")?;
        } else if tk.test_str("NONDEFAULTRULE")? {
            let hardspacing = tk.test_str("HARDSPACING")?;
            return Err(LefDefParseError::NotImplemented("NONDEFAULTRULE"));
        } else if tk.test_str("LAYER")? {
            // Dispatch on the mandatory layer TYPE.
            let layer_name = tk.take_str()?;
            tk.expect_str("TYPE")?;
            if tk.test_str("CUT")? {
                let mut layer = read_cut_layer(&mut tk)?;
                layer.name = layer_name.clone();
                library.technology.layers.push(Layer::Cut(layer));
            } else if tk.test_str("ROUTING")? {
                let mut layer = read_routing_layer(&mut tk)?;
                layer.name = layer_name.clone();
                library.technology.layers.push(Layer::Routing(layer));
            } else if tk.test_str("MASTERSLICE")? {
                let mut layer = read_masterslice_layer(&mut tk)?;
                layer.name = layer_name.clone();
                library.technology.layers.push(Layer::MasterSlice(layer));
            } else if tk.test_str("OVERLAP")? {
                // OVERLAP layers are skipped statement by statement.
                loop {
                    if tk.test_str("END")? {
                        break;
                    } else {
                        tk.skip_until_str(";")?;
                    }
                }
            } else {
                log::error!(
                    "Unsupported layer type '{}'.",
                    tk.current_token_str().unwrap()
                );
            }
            // Every layer section closes with `END <layer name>`.
            tk.expect_str(layer_name.as_str())?;
        } else if tk.test_str("VIA")? {
            let (via_name, via) = read_via(&mut tk)?;
            if library.vias.contains_key(&via_name) {
                return Err(LefDefParseError::Other("Via name is already used."));
            }
            library.vias.insert(via_name, via);
        } else if tk.test_str("VIARULE")? {
            // Only the GENERATE form is handled; the parsed values are
            // validated but not stored in the library yet.
            let via_rule_name = tk.take_str()?;
            tk.expect_str("GENERATE")?;
            let is_default = tk.test_str("DEFAULT")?;
            // Two routing-layer sub-sections.
            for i in 0..2 {
                tk.expect_str("LAYER")?;
                let routing_layer_name = tk.take_str()?;
                tk.expect_str(";")?;
                tk.expect_str("ENCLOSURE")?;
                let overhang1: f64 = tk.take_and_parse()?;
                let overhang2: f64 = tk.take_and_parse()?;
                tk.expect_str(";")?;
                if tk.test_str("WIDTH")? {
                    let min_width: f64 = tk.take_and_parse()?;
                    tk.expect_str("TO")?;
                    let max_width: f64 = tk.take_and_parse()?;
                    tk.expect_str(";")?;
                }
            }
            // Cut-layer sub-section.
            tk.expect_str("LAYER")?;
            let cut_layer_name = tk.take_str()?;
            tk.expect_str(";")?;
            tk.expect_str("RECT")?;
            let (p1, p2) = read_rect(&mut tk)?;
            let rect: db::Rect<f64> = db::Rect::new(p1, p2);
            tk.expect_str(";")?;
            tk.expect_str("SPACING")?;
            let x_spacing: f64 = tk.take_and_parse()?;
            tk.expect_str("BY")?;
            let y_spacing: f64 = tk.take_and_parse()?;
            tk.expect_str(";")?;
            if tk.test_str("RESISTANCE")? {
                let resistance_per_cut: f64 = tk.take_and_parse()?;
                tk.expect_str(";")?;
            }
            tk.expect_str("END")?;
            tk.expect_str(via_rule_name.as_str())?;
        } else if tk.test_str("SPACING")? {
            // Same-net spacing rules; consumed but not stored yet.
            loop {
                if tk.test_str("END")? {
                    tk.expect_str("SPACING")?;
                    break;
                } else {
                    tk.expect_str("SAMENET")?;
                    let layer1 = tk.take_str()?;
                    let layer2 = tk.take_str()?;
                    let min_space: f64 = tk.take_and_parse()?;
                    let stack = tk.test_str("STACK")?;
                    tk.expect_str(";")?;
                }
            }
        } else if tk.test_str("SITE")? {
            let site_name = tk.take_str()?;
            let mut class: Option<SiteClass> = None;
            let mut size: Option<(f64, f64)> = None;
            let mut symmetry = Symmetry::default();
            let mut row_pattern = vec![];
            while !tk.test_str("END")? {
                if tk.test_str("CLASS")? {
                    class = Some(tk.take_and_parse()?);
                    tk.expect_str(";")?;
                } else if tk.test_str("SYMMETRY")? {
                    // Accumulate symmetry flags until ';'.
                    loop {
                        if tk.test_str(";")? {
                            break;
                        } else {
                            symmetry = symmetry.union(tk.take_and_parse()?);
                        }
                    }
                } else if tk.test_str("ROWPATTERN")? {
                    loop {
                        if tk.test_str(";")? {
                            break;
                        } else {
                            let previous_site_name = tk.take_str()?;
                            let site_orient: Orient = tk.take_and_parse()?;
                            row_pattern.push((previous_site_name, site_orient));
                        }
                    }
                } else if tk.test_str("SIZE")? {
                    let width: f64 = tk.take_and_parse()?;
                    tk.expect_str("BY")?;
                    let heigth: f64 = tk.take_and_parse()?;
                    tk.expect_str(";")?;
                    size = Some((width, heigth));
                }
            }
            tk.expect_str(site_name.as_str())?;
            // SIZE and CLASS are mandatory in a SITE definition.
            let site = SiteDefinition {
                name: site_name.clone(),
                size: size.ok_or(LefDefParseError::Other("SIZE must be specified in SITE."))?,
                symmetry,
                class: class.ok_or(LefDefParseError::Other("CLASS must be specified in SITE."))?,
                row_pattern,
            };
            library.sites.insert(site_name, site);
        } else if tk.test_str("MACRO")? {
            let mut m = Macro::default();
            let macro_name = tk.take_str()?;
            m.name = macro_name;
            if tk.test_str("CLASS")? {
                let class: MacroClass = tk.take_and_parse()?;
                // Some classes carry an optional sub-class token before ';'.
                let with_sub_class = if !tk.test_str(";")? {
                    let with_sub_class = match class {
                        MacroClass::COVER(_) => {
                            let bump = tk.test_str("BUMP")?;
                            MacroClass::COVER(bump)
                        }
                        MacroClass::RING => MacroClass::RING,
                        MacroClass::BLOCK(_) => MacroClass::BLOCK(Some(tk.take_and_parse()?)),
                        MacroClass::PAD(_) => MacroClass::PAD(Some(tk.take_and_parse()?)),
                        MacroClass::CORE(_) => MacroClass::CORE(Some(tk.take_and_parse()?)),
                        MacroClass::ENDCAP(_) => MacroClass::ENDCAP(Some(tk.take_and_parse()?)),
                    };
                    tk.expect_str(";")?;
                    with_sub_class
                } else {
                    class
                };
                m.class = Some(with_sub_class);
            };
            loop {
                if tk.test_str("END")? {
                    break;
                } else if tk.test_str("FOREIGN")? {
                    // FOREIGN foreignCellName [point [orient]] ;
                    let foreign_cell_name = tk.take_str()?;
                    let (point, orient) = if !tk.test_str(";")? {
                        let point = read_point(&mut tk)?;
                        let orient = if !tk.test_str(";")? {
                            let orient: Orient = tk.take_and_parse()?;
                            tk.expect_str(";")?;
                            orient
                        } else {
                            Default::default()
                        };
                        (point, orient)
                    } else {
                        (Default::default(), Default::default())
                    };
                    m.foreign.push((foreign_cell_name, point.into(), orient))
                } else if tk.test_str("ORIGIN")? {
                    let origin = read_point(&mut tk)?;
                    tk.expect_str(";")?;
                    m.origin = origin.into();
                } else if tk.test_str("EEQ")? {
                    let macro_name = tk.take_str()?;
                    tk.expect_str(";")?;
                    m.eeq = Some(macro_name);
                } else if tk.test_str("SIZE")? {
                    let width: f64 = tk.take_and_parse()?;
                    tk.expect_str("BY")?;
                    let height: f64 = tk.take_and_parse()?;
                    tk.expect_str(";")?;
                    m.size = Some((width, height));
                } else if tk.test_str("SYMMETRY")? {
                    let mut symmetry = Symmetry::default();
                    while !tk.test_str(";")? {
                        symmetry = symmetry.union(tk.take_and_parse()?);
                    }
                    m.symmetry = symmetry;
                } else if tk.test_str("SITE")? {
                    // SITE siteName [origin orient [stepPattern]] ;
                    let mut site = Site::default();
                    site.name = tk.take_str()?;
                    if !tk.test_str(";")? {
                        let x_origin: f64 = tk.take_and_parse()?;
                        let y_origin: f64 = tk.take_and_parse()?;
                        site.origin = (x_origin, y_origin);
                        site.site_orient = tk.take_and_parse()?;
                        if !tk.test_str(";")? {
                            site.step_pattern = Some(read_step_pattern(&mut tk)?);
                            tk.expect_str(";")?;
                        }
                    }
                    m.sites.push(site);
                } else if let Some(pin) = try_read_pin(&mut tk)? {
                    m.pins.push(pin);
                } else if let Some(obs) = try_read_obs(&mut tk)? {
                    m.obs.extend(obs);
                } else if tk.test_str("DENSITY")? {
                    // NOTE(review): only the DENSITY keyword is consumed; the
                    // statement body is left in the stream — confirm whether
                    // following tokens are handled by the other branches.
                    log::warn!("Skipping DENSITY.");
                } else if tk.test_str("PROPERTY")? {
                    let prop_name = tk.take_str()?;
                    let prop_value = tk.take_str()?;
                    tk.expect_str(";")?;
                    log::warn!("Skipping PROPERTY.");
                }
                // NOTE(review): a token matching none of the branches above
                // is never consumed, so this loop would spin forever on
                // unexpected input inside a MACRO — consider erroring.
            }
            // MACRO is closed by `END <macro name>`.
            tk.expect_str(m.name.as_str())?;
            library.library.macros.insert(m.name.clone(), m);
        } else {
            // Unknown top-level token: abort with a descriptive error.
            return if let Some(token) = tk.current_token_str() {
                Err(LefDefParseError::UnknownToken(token))
            } else {
                Err(LefDefParseError::UnexpectedEndOfFile)
            };
        }
    }
    Ok(library)
}
/// Reads a `UNITS ... END UNITS` section, updating `old_units` with every
/// unit statement encountered.
///
/// Each statement has the shape `<TYPE> <UNIT-NAME> <integer> ;`. A token
/// that matches no known unit type consumes nothing, so the subsequent
/// `;` expectation reports the error.
fn read_units<I>(
    tk: &mut Tokenized<I, LefDefLexer>,
    old_units: Units,
) -> Result<Units, LefDefParseError>
where
    I: Iterator<Item = char> + PeekingNext,
{
    let mut units = old_units;
    // (type keyword, mandatory unit keyword) pairs.
    const UNIT_KEYWORDS: [(&str, &str); 8] = [
        ("TIME", "NANOSECONDS"),
        ("CAPACITANCE", "PICOFARADS"),
        ("RESISTANCE", "OHMS"),
        ("POWER", "MILLIWATTS"),
        ("CURRENT", "MILLIAMPS"),
        ("VOLTAGE", "VOLTS"),
        ("DATABASE", "MICRONS"),
        ("FREQUENCY", "MEGAHERTZ"),
    ];
    loop {
        if tk.test_str("END")? {
            tk.expect_str("UNITS")?;
            break;
        }
        for (idx, &(unit_type, unit)) in UNIT_KEYWORDS.iter().enumerate() {
            if tk.test_str(unit_type)? {
                tk.expect_str(unit)?;
                let parsed = tk.take_and_parse::<u64>()?;
                log::debug!("Unit: {} {} {}", unit_type, unit, parsed);
                // Store into the field matching the recognized keyword.
                match idx {
                    0 => units.time_ns = parsed,
                    1 => units.capacitance_pf = parsed,
                    2 => units.resistance_ohms = parsed,
                    3 => units.power_mw = parsed,
                    4 => units.current_ma = parsed,
                    5 => units.voltage_v = parsed,
                    6 => units.database_microns = parsed,
                    7 => units.frequency_mega_hz = parsed,
                    _ => unreachable!(),
                }
                break;
            }
        }
        tk.expect_str(";")?;
    }
    Ok(units)
}
/// Reads a `VIA <name> [DEFAULT] ... END <name>` definition.
///
/// Two forms exist: a generated via (marked by the `VIARULE` keyword) with
/// CUTSIZE / LAYERS / CUTSPACING / ENCLOSURE parameters, and a fixed via
/// defined by explicit per-layer RECT / POLYGON shapes. Returns the via
/// name together with the parsed definition. Any trailing statements before
/// `END` are skipped.
fn read_via<I>(
    tk: &mut Tokenized<I, LefDefLexer>,
) -> Result<(String, ViaDefinition), LefDefParseError>
where
    I: Iterator<Item = char> + PeekingNext,
{
    let via_name = tk.take_str()?;
    let is_default = tk.test_str("DEFAULT")?;
    let is_generated = tk.test_str("VIARULE")?;
    let via_definition = if is_generated {
        // Generated via: parameters refer to a via-generation rule.
        let mut via = GeneratedVia::default();
        via.is_default = is_default;
        via.rule_name = tk.take_str()?;
        tk.expect_str(";")?;
        tk.expect_str("CUTSIZE")?;
        via.cut_size = (tk.take_and_parse()?, tk.take_and_parse()?);
        tk.expect_str(";")?;
        tk.expect_str("LAYERS")?;
        // Three layer names are expected here.
        via.layers = (tk.take_str()?, tk.take_str()?, tk.take_str()?);
        tk.expect_str(";")?;
        tk.expect_str("CUTSPACING")?;
        via.cut_spacing = (tk.take_and_parse()?, tk.take_and_parse()?);
        tk.expect_str(";")?;
        tk.expect_str("ENCLOSURE")?;
        via.enclosure = (
            tk.take_and_parse()?,
            tk.take_and_parse()?,
            tk.take_and_parse()?,
            tk.take_and_parse()?,
        );
        tk.expect_str(";")?;
        // Optional statements follow.
        if tk.test_str("ROWCOL")? {
            via.num_rows_cols = Some((tk.take_and_parse()?, tk.take_and_parse()?));
            tk.expect_str(";")?;
        }
        if tk.test_str("ORIGIN")? {
            via.origin = Some((tk.take_and_parse()?, tk.take_and_parse()?));
            tk.expect_str(";")?;
        }
        if tk.test_str("OFFSET")? {
            via.offset = Some((
                tk.take_and_parse()?,
                tk.take_and_parse()?,
                tk.take_and_parse()?,
                tk.take_and_parse()?,
            ));
            tk.expect_str(";")?;
        }
        if tk.test_str("PATTERN")? {
            via.cut_pattern = Some(tk.take_str()?);
            tk.expect_str(";")?;
        }
        ViaDefinition::GeneratedVia(via)
    } else {
        // Fixed via: explicit geometry per layer.
        let mut via = FixedVia::default();
        via.is_default = is_default;
        loop {
            if tk.test_str("RESISTANCE")? {
                via.resistance = Some(tk.take_and_parse()?);
                tk.expect_str(";")?
            } else if tk.test_str("LAYER")? {
                let layer_name = tk.take_str()?;
                tk.expect_str(";")?;
                // Repeated LAYER statements for the same layer extend the
                // same shape list.
                let shapes = via.geometry.entry(layer_name).or_insert(vec![]);
                loop {
                    if tk.test_str("RECT")? {
                        // RECT [MASK maskNum] <rect> ;
                        let mask_num = if tk.test_str("MASK")? {
                            Some(tk.take_and_parse()?)
                        } else {
                            None
                        };
                        let (p1, p2) = read_rect(tk)?;
                        tk.expect_str(";")?;
                        shapes.push(ViaShape {
                            mask_num,
                            shape: RectOrPolygon::Rect((p1.into(), p2.into())),
                        })
                    } else if tk.test_str("POLYGON")? {
                        // POLYGON [MASK maskNum] <points> ;
                        let mask_num = if tk.test_str("MASK")? {
                            Some(tk.take_and_parse()?)
                        } else {
                            None
                        };
                        let points = read_polygon::<f64, _>(tk)?;
                        let points = points.into_iter().map(|p| p.into()).collect();
                        shapes.push(ViaShape {
                            mask_num,
                            shape: RectOrPolygon::Polygon(points),
                        })
                    } else {
                        break;
                    }
                }
            } else {
                break;
            }
        }
        ViaDefinition::FixedVia(via)
    };
    // Skip any remaining statements until `END <via name>`.
    loop {
        if tk.test_str("END")? {
            tk.expect_str(via_name.as_str())?;
            break;
        } else {
            tk.skip_until_str(";")?;
        }
    }
    Ok((via_name, via_definition))
}
/// Reads the body of a `LAYER <name> TYPE ROUTING ;` section up to `END`.
///
/// The caller consumes the repeated layer name that follows `END`.
/// Statements that are not recognized are skipped until the next ';' with a
/// debug message.
fn read_routing_layer<I>(
    tk: &mut Tokenized<I, LefDefLexer>,
) -> Result<RoutingLayer, LefDefParseError>
where
    I: Iterator<Item = char> + PeekingNext,
{
    let mut layer = RoutingLayer::default();
    loop {
        if tk.test_str("END")? {
            break;
        } else if tk.test_str("MASK")? {
            layer.mask_num = Some(tk.take_and_parse()?);
            // Consume the terminating ';' like every other statement does.
            // Previously the stray ';' was only absorbed by the catch-all
            // skip branch below, with a misleading debug message.
            tk.expect_str(";")?;
        } else if tk.test_str("DIRECTION")? {
            layer.direction = tk.take_and_parse()?;
            tk.expect_str(";")?;
        } else if tk.test_str("PITCH")? {
            // Either a single pitch used for both axes or `PITCH x y ;`.
            let x_pitch = tk.take_and_parse()?;
            let y_pitch = if tk.test_str(";")? {
                x_pitch
            } else {
                let y_pitch = tk.take_and_parse()?;
                tk.expect_str(";")?;
                y_pitch
            };
            layer.pitch = (x_pitch, y_pitch);
        } else if tk.test_str("WIDTH")? {
            layer.width = tk.take_and_parse()?;
            tk.expect_str(";")?;
        } else if tk.test_str("AREA")? {
            layer.min_area = Some(tk.take_and_parse()?);
            tk.expect_str(";")?;
        } else if tk.test_str("SPACING")? {
            layer.spacing.push(read_spacing(tk)?);
        } else if tk.test_str("SPACINGTABLE")? {
            layer.spacing_table = Some(read_spacing_table(tk)?);
        } else if tk.test_str("WIREEXTENSION")? {
            layer.wire_extension = Some(tk.take_and_parse()?);
            tk.expect_str(";")?;
        } else if tk.test_str("MAXWIDTH")? {
            layer.max_width = Some(tk.take_and_parse()?);
            tk.expect_str(";")?;
        } else if tk.test_str("MINWIDTH")? {
            layer.min_width = Some(tk.take_and_parse()?);
            tk.expect_str(";")?;
        } else if tk.test_str("RESISTANCE")? {
            // Only the per-square form is supported here.
            tk.expect_str("RPERSQ")?;
            layer.resistance = Some(tk.take_and_parse()?);
            tk.expect_str(";")?;
        } else if tk.test_str("CAPACITANCE")? {
            tk.expect_str("CPERSQDIST")?;
            layer.capacitance = Some(tk.take_and_parse()?);
            tk.expect_str(";")?;
        } else if tk.test_str("HEIGHT")? {
            layer.height = Some(tk.take_and_parse()?);
            tk.expect_str(";")?;
        } else {
            log::debug!("Skip '{}'", tk.current_token_str().unwrap());
            tk.skip_until_str(";")?;
        }
    }
    Ok(layer)
}
/// Reads the body of a `LAYER <name> TYPE CUT ;` section up to `END`.
///
/// SPACING rules are parsed partially: the spacing value and the
/// CENTERTOCENTER / SAMENET flags are stored, while LAYER / ADJACENTCUTS /
/// PARALLELOVERLAP / AREA qualifiers are consumed but discarded. The caller
/// consumes the repeated layer name that follows `END`.
fn read_cut_layer<I>(tk: &mut Tokenized<I, LefDefLexer>) -> Result<CutLayer, LefDefParseError>
where
    I: Iterator<Item = char> + PeekingNext,
{
    let mut layer = CutLayer::default();
    loop {
        if tk.test_str("END")? {
            break;
        } else if tk.test_str("MASK")? {
            layer.mask_num = Some(tk.take_and_parse()?);
            // Consume the terminating ';' for consistency with the other
            // statements (previously it was left for the catch-all skip
            // branch below).
            tk.expect_str(";")?;
        } else if tk.test_str("SPACING")? {
            let mut cut_spacing = CutSpacingRule::default();
            cut_spacing.spacing = tk.take_and_parse()?;
            cut_spacing.center_to_center = tk.test_str("CENTERTOCENTER")?;
            cut_spacing.same_net = tk.test_str("SAMENET")?;
            if tk.test_str("LAYER")? {
                // Second-layer qualifier: parsed but not stored yet.
                let second_layer_name = tk.take_str()?;
                let stack = tk.test_str("STACK")?;
            } else if tk.test_str("ADJACENTCUTS")? {
                // ADJACENTCUTS [n] WITHIN d [EXCEPTSAMEPGNET]; values are
                // consumed but discarded.
                if !tk.peeking_test_str("WITHIN")? {
                    let n: u32 = tk.take_and_parse()?;
                }
                tk.expect_str("WITHIN")?;
                let cut_within: f64 = tk.take_and_parse()?;
                let except_same_pg_net = tk.test_str("EXCEPTSAMEPGNET")?;
            } else if tk.test_str("PARALLELOVERLAP")? {
                // Flag only; nothing further to parse here.
            } else if tk.test_str("AREA")? {
                let cut_area: f64 = tk.take_and_parse()?;
            }
            // Swallow anything else up to the end of the statement.
            tk.skip_until_str(";")?;
            layer.spacing.push(cut_spacing);
        } else if tk.test_str("PROPERTY")? {
            // Property name and value are currently discarded.
            let property = tk.take_str()?;
            tk.skip_until_str(";")?;
        } else {
            log::debug!("Skip '{}'", tk.current_token_str().unwrap());
            tk.skip_until_str(";")?;
        }
    }
    Ok(layer)
}
/// Reads the body of a `LAYER <name> TYPE MASTERSLICE ;` section up to `END`.
///
/// Only the optional MASK number is stored; all other statements are
/// skipped. The caller consumes the repeated layer name after `END`.
fn read_masterslice_layer<I>(
    tk: &mut Tokenized<I, LefDefLexer>,
) -> Result<MasterSliceLayer, LefDefParseError>
where
    I: Iterator<Item = char> + PeekingNext,
{
    let mut layer = MasterSliceLayer::default();
    loop {
        if tk.test_str("END")? {
            break;
        } else if tk.test_str("MASK")? {
            layer.mask_num = Some(tk.take_and_parse()?);
            // Consume the terminating ';' (previously it was left for the
            // skip branch below to absorb).
            tk.expect_str(";")?;
        } else {
            tk.skip_until_str(";")?;
        }
    }
    Ok(layer)
}
/// Reads a `SPACING <value> ... ;` statement of a routing layer.
///
/// Only the minimum spacing value is stored; any further qualifiers are
/// skipped up to the terminating ';'.
fn read_spacing<I>(tk: &mut Tokenized<I, LefDefLexer>) -> Result<SpacingRules, LefDefParseError>
where
    I: Iterator<Item = char> + PeekingNext,
{
    let mut rules = SpacingRules::default();
    rules.min_spacing = tk.take_and_parse()?;
    tk.skip_until_str(";")?;
    Ok(rules)
}
/// Reads a `SPACINGTABLE PARALLELRUNLENGTH ... ;` statement of a routing
/// layer.
///
/// The table is a list of parallel-run lengths followed by one
/// `WIDTH <w> <spacing...>` row per width. Every row must have exactly one
/// spacing per run length; inconsistent rows are rejected. INFLUENCE and
/// TWOWIDTHS tables are not supported.
fn read_spacing_table<I>(
    tk: &mut Tokenized<I, LefDefLexer>,
) -> Result<SpacingTable, LefDefParseError>
where
    I: Iterator<Item = char> + PeekingNext,
{
    let mut spacing_table = SpacingTable::default();
    tk.expect_str("PARALLELRUNLENGTH")?;
    // Use short-circuiting `&&` (was bitwise `&`) so the second peek only
    // happens when the first one did not already match.
    while !tk.peeking_test_str("WIDTH")? && !tk.peeking_test_str(";")? {
        let length: f64 = tk.take_and_parse()?;
        spacing_table.parallel_run_lengths.push(length);
    }
    while tk.test_str("WIDTH")? {
        spacing_table.widths.push(tk.take_and_parse()?);
        let mut row_values = Vec::new();
        while !tk.peeking_test_str("WIDTH")? && !tk.peeking_test_str(";")? {
            let spacing: f64 = tk.take_and_parse()?;
            row_values.push(spacing);
        }
        spacing_table.spacings.push(row_values)
    }
    tk.expect_str(";")?;
    {
        // Validate: each row must have one entry per parallel run length.
        let table_width = spacing_table.parallel_run_lengths.len();
        let consistent_row_sizes = spacing_table
            .spacings
            .iter()
            .map(|s| s.len())
            .all(|l| l == table_width);
        if !consistent_row_sizes {
            return Err(LefDefParseError::Other(
                "Row sizes of SPACINGTABLE are not consistent.",
            ));
        }
    }
    if tk.test_str("SPACINGTABLE")? {
        unimplemented!("INFLUENCE or TWOWIDTHS tables are not implemented yet.");
    }
    Ok(spacing_table)
}
/// Smoke test: parses a trimmed-down excerpt of the gscl45nm standard-cell
/// LEF and checks that parsing succeeds end to end.
#[test]
fn test_read_lef_small() {
    let data = r#"
# Parts from gscl45nm.lef.
VERSION 5.5 ;
NAMESCASESENSITIVE ON ;
BUSBITCHARS "[]" ;
DIVIDERCHAR "/" ;
PROPERTYDEFINITIONS
LAYER contactResistance REAL ;
END PROPERTYDEFINITIONS
UNITS
DATABASE MICRONS 2000 ;
END UNITS
MANUFACTURINGGRID 0.0025 ;
LAYER poly
TYPE MASTERSLICE ;
END poly
LAYER contact
TYPE CUT ;
SPACING 0.075 ;
PROPERTY contactResistance 10.5 ;
END contact
LAYER metal1
TYPE ROUTING ;
DIRECTION HORIZONTAL ;
PITCH 0.19 ;
WIDTH 0.065 ;
SPACING 0.065 ;
RESISTANCE RPERSQ 0.38 ;
END metal1
LAYER via1
TYPE CUT ;
SPACING 0.075 ;
PROPERTY contactResistance 5.69 ;
END via1
LAYER OVERLAP
TYPE OVERLAP ;
END OVERLAP
VIA M2_M1_via DEFAULT
LAYER metal1 ;
RECT -0.0675 -0.0325 0.0675 0.0325 ;
LAYER via1 ;
RECT -0.0325 -0.0325 0.0325 0.0325 ;
LAYER metal2 ;
RECT -0.035 -0.0675 0.035 0.0675 ;
END M2_M1_via
VIARULE M2_M1 GENERATE
LAYER metal1 ;
ENCLOSURE 0 0.035 ;
LAYER metal2 ;
ENCLOSURE 0 0.035 ;
LAYER via1 ;
RECT -0.0325 -0.0325 0.0325 0.0325 ;
SPACING 0.14 BY 0.14 ;
END M2_M1
VIARULE M1_POLY GENERATE
LAYER poly ;
ENCLOSURE 0 0 ;
LAYER metal1 ;
ENCLOSURE 0 0.035 ;
LAYER contact ;
RECT -0.0325 -0.0325 0.0325 0.0325 ;
SPACING 0.14 BY 0.14 ;
END M1_POLY
SPACING
SAMENET metal1 metal1 0.065 ;
SAMENET metal2 metal2 0.07 ;
SAMENET metal6 metal6 0.14 ;
SAMENET metal5 metal5 0.14 ;
SAMENET metal4 metal4 0.14 ;
SAMENET metal3 metal3 0.07 ;
SAMENET metal7 metal7 0.4 ;
SAMENET metal8 metal8 0.4 ;
SAMENET metal9 metal9 0.8 ;
SAMENET metal10 metal10 0.8 ;
END SPACING
SITE CoreSite
CLASS CORE ;
SIZE 0.38 BY 2.47 ;
END CoreSite
MACRO INVX1
CLASS CORE ;
ORIGIN 0 0 ;
FOREIGN INVX1 0 0 ;
SIZE 0.57 BY 2.47 ;
SYMMETRY X Y ;
SITE CoreSite ;
PIN A
DIRECTION INPUT ;
USE SIGNAL ;
PORT
LAYER metal1 ;
RECT 0.1575 0.4875 0.2575 0.6225 ;
END
END A
PIN Y
DIRECTION OUTPUT ;
USE SIGNAL ;
PORT
LAYER metal1 ;
RECT 0.3475 0.2175 0.4125 1.815 ;
RECT 0.3125 0.2175 0.4475 0.4225 ;
END
END Y
PIN gnd
DIRECTION INOUT ;
USE GROUND ;
SHAPE ABUTMENT ;
PORT
LAYER metal1 ;
RECT 0.1625 -0.065 0.2275 0.4225 ;
RECT 0 -0.065 0.57 0.065 ;
END
END gnd
PIN vdd
DIRECTION INOUT ;
USE POWER ;
SHAPE ABUTMENT ;
PORT
LAYER metal1 ;
RECT 0.1625 1.265 0.2275 2.535 ;
RECT 0 2.405 0.57 2.535 ;
END
END vdd
END INVX1
END LIBRARY
"#;
    let result = read_lef_chars(data.chars());
    // Dump the parse result for easier debugging when the assertion fails.
    dbg!(&result);
    assert!(result.is_ok());
}