summaryrefslogtreecommitdiff
path: root/crates/typst-pdf
diff options
context:
space:
mode:
authorLaurenz <laurmaedje@gmail.com>2023-11-08 14:32:42 +0100
committerLaurenz <laurmaedje@gmail.com>2023-11-08 15:09:55 +0100
commit46846a337e8084acd46c70bccc2fca2659e9fb9a (patch)
tree9e9d6abf76867d823644e85e34571c70e07eea4f /crates/typst-pdf
parent80b4ca4c04cb5d911947895d9d04c87efb97b0f4 (diff)
Extract `typst-pdf` crate
Diffstat (limited to 'crates/typst-pdf')
-rw-r--r--crates/typst-pdf/Cargo.toml34
-rw-r--r--crates/typst-pdf/src/color.rs468
-rw-r--r--crates/typst-pdf/src/extg.rs35
-rw-r--r--crates/typst-pdf/src/font.rs268
-rw-r--r--crates/typst-pdf/src/gradient.rs581
-rw-r--r--crates/typst-pdf/src/icc/sGrey-v4.iccbin0 -> 360 bytes
-rw-r--r--crates/typst-pdf/src/icc/sRGB-v4.iccbin0 -> 480 bytes
-rw-r--r--crates/typst-pdf/src/image.rs170
-rw-r--r--crates/typst-pdf/src/lib.rs377
-rw-r--r--crates/typst-pdf/src/outline.rs191
-rw-r--r--crates/typst-pdf/src/page.rs759
-rw-r--r--crates/typst-pdf/src/postscript/hsl.ps63
-rw-r--r--crates/typst-pdf/src/postscript/hsv.ps62
-rw-r--r--crates/typst-pdf/src/postscript/oklab.ps78
14 files changed, 3086 insertions, 0 deletions
diff --git a/crates/typst-pdf/Cargo.toml b/crates/typst-pdf/Cargo.toml
new file mode 100644
index 00000000..74e38a69
--- /dev/null
+++ b/crates/typst-pdf/Cargo.toml
@@ -0,0 +1,34 @@
+[package]
+name = "typst-pdf"
+description = "PDF exporter for Typst."
+version.workspace = true
+rust-version.workspace = true
+authors.workspace = true
+edition.workspace = true
+homepage.workspace = true
+repository.workspace = true
+license.workspace = true
+categories.workspace = true
+keywords.workspace = true
+
+[lib]
+doctest = false
+bench = false
+
+[dependencies]
+typst = { workspace = true }
+base64 = { workspace = true }
+bytemuck = { workspace = true }
+comemo = { workspace = true }
+ecow = { workspace = true}
+image = { workspace = true }
+miniz_oxide = { workspace = true }
+once_cell = { workspace = true }
+pdf-writer = { workspace = true }
+subsetter = { workspace = true }
+svg2pdf = { workspace = true }
+tracing = { workspace = true }
+ttf-parser = { workspace = true }
+unicode-properties = { workspace = true }
+unscanny = { workspace = true }
+xmp-writer = { workspace = true }
diff --git a/crates/typst-pdf/src/color.rs b/crates/typst-pdf/src/color.rs
new file mode 100644
index 00000000..80d277ed
--- /dev/null
+++ b/crates/typst-pdf/src/color.rs
@@ -0,0 +1,468 @@
+use once_cell::sync::Lazy;
+use pdf_writer::types::DeviceNSubtype;
+use pdf_writer::{writers, Chunk, Dict, Filter, Name, Ref};
+use typst::geom::{Color, ColorSpace, Paint};
+
+use crate::deflate;
+use crate::page::{PageContext, Transforms};
+
+// The names of the color spaces.
+pub const SRGB: Name<'static> = Name(b"srgb");
+pub const D65_GRAY: Name<'static> = Name(b"d65gray");
+pub const OKLAB: Name<'static> = Name(b"oklab");
+pub const HSV: Name<'static> = Name(b"hsv");
+pub const HSL: Name<'static> = Name(b"hsl");
+pub const LINEAR_SRGB: Name<'static> = Name(b"linearrgb");
+
+// The names of the color components.
+const OKLAB_L: Name<'static> = Name(b"L");
+const OKLAB_A: Name<'static> = Name(b"A");
+const OKLAB_B: Name<'static> = Name(b"B");
+const HSV_H: Name<'static> = Name(b"H");
+const HSV_S: Name<'static> = Name(b"S");
+const HSV_V: Name<'static> = Name(b"V");
+const HSL_H: Name<'static> = Name(b"H");
+const HSL_S: Name<'static> = Name(b"S");
+const HSL_L: Name<'static> = Name(b"L");
+
+// The ICC profiles.
+static SRGB_ICC_DEFLATED: Lazy<Vec<u8>> =
+ Lazy::new(|| deflate(include_bytes!("icc/sRGB-v4.icc")));
+static GRAY_ICC_DEFLATED: Lazy<Vec<u8>> =
+ Lazy::new(|| deflate(include_bytes!("icc/sGrey-v4.icc")));
+
+// The PostScript functions for color spaces.
+static OKLAB_DEFLATED: Lazy<Vec<u8>> =
+ Lazy::new(|| deflate(minify(include_str!("postscript/oklab.ps")).as_bytes()));
+static HSV_DEFLATED: Lazy<Vec<u8>> =
+ Lazy::new(|| deflate(minify(include_str!("postscript/hsv.ps")).as_bytes()));
+static HSL_DEFLATED: Lazy<Vec<u8>> =
+ Lazy::new(|| deflate(minify(include_str!("postscript/hsl.ps")).as_bytes()));
+
+/// The color spaces present in the PDF document
+#[derive(Default)]
+pub struct ColorSpaces {
+ oklab: Option<Ref>,
+ srgb: Option<Ref>,
+ d65_gray: Option<Ref>,
+ hsv: Option<Ref>,
+ hsl: Option<Ref>,
+ use_linear_rgb: bool,
+}
+
+impl ColorSpaces {
+ /// Get a reference to the oklab color space.
+ ///
+ /// # Warning
+ /// The A and B components of the color must be offset by +0.4 before being
+ /// encoded into the PDF file.
+ pub fn oklab(&mut self, alloc: &mut Ref) -> Ref {
+ *self.oklab.get_or_insert_with(|| alloc.bump())
+ }
+
+ /// Get a reference to the srgb color space.
+ pub fn srgb(&mut self, alloc: &mut Ref) -> Ref {
+ *self.srgb.get_or_insert_with(|| alloc.bump())
+ }
+
+ /// Get a reference to the gray color space.
+ pub fn d65_gray(&mut self, alloc: &mut Ref) -> Ref {
+ *self.d65_gray.get_or_insert_with(|| alloc.bump())
+ }
+
+ /// Get a reference to the hsv color space.
+ ///
+ /// # Warning
+ /// The Hue component of the color must be in degrees and must be divided
+ /// by 360.0 before being encoded into the PDF file.
+ pub fn hsv(&mut self, alloc: &mut Ref) -> Ref {
+ *self.hsv.get_or_insert_with(|| alloc.bump())
+ }
+
+ /// Get a reference to the hsl color space.
+ ///
+ /// # Warning
+ /// The Hue component of the color must be in degrees and must be divided
+ /// by 360.0 before being encoded into the PDF file.
+ pub fn hsl(&mut self, alloc: &mut Ref) -> Ref {
+ *self.hsl.get_or_insert_with(|| alloc.bump())
+ }
+
+ /// Mark linear RGB as used.
+ pub fn linear_rgb(&mut self) {
+ self.use_linear_rgb = true;
+ }
+
+ /// Write the color space on usage.
+ pub fn write(
+ &mut self,
+ color_space: ColorSpace,
+ writer: writers::ColorSpace,
+ alloc: &mut Ref,
+ ) {
+ match color_space {
+ ColorSpace::Oklab => {
+ let mut oklab = writer.device_n([OKLAB_L, OKLAB_A, OKLAB_B]);
+ self.write(ColorSpace::LinearRgb, oklab.alternate_color_space(), alloc);
+ oklab.tint_ref(self.oklab(alloc));
+ oklab.attrs().subtype(DeviceNSubtype::DeviceN);
+ }
+ ColorSpace::Srgb => writer.icc_based(self.srgb(alloc)),
+ ColorSpace::D65Gray => writer.icc_based(self.d65_gray(alloc)),
+ ColorSpace::LinearRgb => {
+ writer.cal_rgb(
+ [0.9505, 1.0, 1.0888],
+ None,
+ Some([1.0, 1.0, 1.0]),
+ Some([
+ 0.4124, 0.2126, 0.0193, 0.3576, 0.715, 0.1192, 0.1805, 0.0722,
+ 0.9505,
+ ]),
+ );
+ }
+ ColorSpace::Hsl => {
+ let mut hsl = writer.device_n([HSL_H, HSL_S, HSL_L]);
+ self.write(ColorSpace::Srgb, hsl.alternate_color_space(), alloc);
+ hsl.tint_ref(self.hsl(alloc));
+ hsl.attrs().subtype(DeviceNSubtype::DeviceN);
+ }
+ ColorSpace::Hsv => {
+ let mut hsv = writer.device_n([HSV_H, HSV_S, HSV_V]);
+ self.write(ColorSpace::Srgb, hsv.alternate_color_space(), alloc);
+ hsv.tint_ref(self.hsv(alloc));
+ hsv.attrs().subtype(DeviceNSubtype::DeviceN);
+ }
+ ColorSpace::Cmyk => writer.device_cmyk(),
+ }
+ }
+
+ // Write the color spaces to the PDF file.
+ pub fn write_color_spaces(&mut self, mut spaces: Dict, alloc: &mut Ref) {
+ if self.oklab.is_some() {
+ self.write(ColorSpace::Oklab, spaces.insert(OKLAB).start(), alloc);
+ }
+
+ if self.srgb.is_some() {
+ self.write(ColorSpace::Srgb, spaces.insert(SRGB).start(), alloc);
+ }
+
+ if self.d65_gray.is_some() {
+ self.write(ColorSpace::D65Gray, spaces.insert(D65_GRAY).start(), alloc);
+ }
+
+ if self.hsv.is_some() {
+ self.write(ColorSpace::Hsv, spaces.insert(HSV).start(), alloc);
+ }
+
+ if self.hsl.is_some() {
+ self.write(ColorSpace::Hsl, spaces.insert(HSL).start(), alloc);
+ }
+
+ if self.use_linear_rgb {
+ self.write(ColorSpace::LinearRgb, spaces.insert(LINEAR_SRGB).start(), alloc);
+ }
+ }
+
+ /// Write the necessary color spaces functions and ICC profiles to the
+ /// PDF file.
+ pub fn write_functions(&self, chunk: &mut Chunk) {
+ // Write the Oklab function & color space.
+ if let Some(oklab) = self.oklab {
+ chunk
+ .post_script_function(oklab, &OKLAB_DEFLATED)
+ .domain([0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
+ .range([0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
+ .filter(Filter::FlateDecode);
+ }
+
+ // Write the HSV function & color space.
+ if let Some(hsv) = self.hsv {
+ chunk
+ .post_script_function(hsv, &HSV_DEFLATED)
+ .domain([0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
+ .range([0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
+ .filter(Filter::FlateDecode);
+ }
+
+ // Write the HSL function & color space.
+ if let Some(hsl) = self.hsl {
+ chunk
+ .post_script_function(hsl, &HSL_DEFLATED)
+ .domain([0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
+ .range([0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
+ .filter(Filter::FlateDecode);
+ }
+
+ // Write the sRGB color space.
+ if let Some(srgb) = self.srgb {
+ chunk
+ .icc_profile(srgb, &SRGB_ICC_DEFLATED)
+ .n(3)
+ .range([0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
+ .filter(Filter::FlateDecode);
+ }
+
+ // Write the gray color space.
+ if let Some(gray) = self.d65_gray {
+ chunk
+ .icc_profile(gray, &GRAY_ICC_DEFLATED)
+ .n(1)
+ .range([0.0, 1.0])
+ .filter(Filter::FlateDecode);
+ }
+ }
+}
+
+/// This function removes comments, line spaces and carriage returns from a
+/// PostScript program. This is necessary to optimize the size of the PDF file.
+fn minify(source: &str) -> String {
+ let mut buf = String::with_capacity(source.len());
+ let mut s = unscanny::Scanner::new(source);
+ while let Some(c) = s.eat() {
+ match c {
+ '%' => {
+ s.eat_until('\n');
+ }
+ c if c.is_whitespace() => {
+ s.eat_whitespace();
+ if buf.ends_with(|c: char| !c.is_whitespace()) {
+ buf.push(' ');
+ }
+ }
+ _ => buf.push(c),
+ }
+ }
+ buf
+}
+
+/// Encodes the color into four f32s, which can be used in a PDF file.
+/// Ensures that the values are in the range [0.0, 1.0].
+///
+/// # Why?
+/// - Oklab: The a and b components are in the range [-0.4, 0.4] and the PDF
+/// specifies (and some readers enforce) that all color values be in the range
+/// [0.0, 1.0]. This means that the PostScript function and the encoded color
+/// must be offset by 0.4.
+/// - HSV/HSL: The hue component is in the range [0.0, 360.0] and the PDF format
+/// specifies that it must be in the range [0.0, 1.0]. This means that the
+/// PostScript function and the encoded color must be divided by 360.0.
+pub trait ColorEncode {
+ /// Performs the color to PDF f32 array conversion.
+ fn encode(&self, color: Color) -> [f32; 4];
+}
+
+impl ColorEncode for ColorSpace {
+ fn encode(&self, color: Color) -> [f32; 4] {
+ match self {
+ ColorSpace::Oklab => {
+ let [l, a, b, alpha] = color.to_oklab().to_vec4();
+ [l, (a + 0.4).clamp(0.0, 1.0), (b + 0.4).clamp(0.0, 1.0), alpha]
+ }
+ ColorSpace::Hsl => {
+ let [h, s, l, _] = color.to_hsl().to_vec4();
+ [h / 360.0, s, l, 0.0]
+ }
+ ColorSpace::Hsv => {
+ let [h, s, v, _] = color.to_hsv().to_vec4();
+ [h / 360.0, s, v, 0.0]
+ }
+ _ => color.to_vec4(),
+ }
+ }
+}
+
+/// Encodes a paint into either a fill or stroke color.
+pub(super) trait PaintEncode {
+ /// Set the paint as the fill color.
+ fn set_as_fill(&self, ctx: &mut PageContext, on_text: bool, transforms: Transforms);
+
+ /// Set the paint as the stroke color.
+ fn set_as_stroke(&self, ctx: &mut PageContext, on_text: bool, transforms: Transforms);
+}
+
+impl PaintEncode for Paint {
+ fn set_as_fill(&self, ctx: &mut PageContext, on_text: bool, transforms: Transforms) {
+ match self {
+ Self::Solid(c) => c.set_as_fill(ctx, on_text, transforms),
+ Self::Gradient(gradient) => gradient.set_as_fill(ctx, on_text, transforms),
+ }
+ }
+
+ fn set_as_stroke(
+ &self,
+ ctx: &mut PageContext,
+ on_text: bool,
+ transforms: Transforms,
+ ) {
+ match self {
+ Self::Solid(c) => c.set_as_stroke(ctx, on_text, transforms),
+ Self::Gradient(gradient) => gradient.set_as_stroke(ctx, on_text, transforms),
+ }
+ }
+}
+
+impl PaintEncode for Color {
+ fn set_as_fill(&self, ctx: &mut PageContext, _: bool, _: Transforms) {
+ match self {
+ Color::Luma(_) => {
+ ctx.parent.colors.d65_gray(&mut ctx.parent.alloc);
+ ctx.set_fill_color_space(D65_GRAY);
+
+ let [l, _, _, _] = ColorSpace::D65Gray.encode(*self);
+ ctx.content.set_fill_color([l]);
+ }
+ Color::Oklab(_) => {
+ ctx.parent.colors.oklab(&mut ctx.parent.alloc);
+ ctx.set_fill_color_space(OKLAB);
+
+ let [l, a, b, _] = ColorSpace::Oklab.encode(*self);
+ ctx.content.set_fill_color([l, a, b]);
+ }
+ Color::LinearRgb(_) => {
+ ctx.parent.colors.linear_rgb();
+ ctx.set_fill_color_space(LINEAR_SRGB);
+
+ let [r, g, b, _] = ColorSpace::LinearRgb.encode(*self);
+ ctx.content.set_fill_color([r, g, b]);
+ }
+ Color::Rgba(_) => {
+ ctx.parent.colors.srgb(&mut ctx.parent.alloc);
+ ctx.set_fill_color_space(SRGB);
+
+ let [r, g, b, _] = ColorSpace::Srgb.encode(*self);
+ ctx.content.set_fill_color([r, g, b]);
+ }
+ Color::Cmyk(_) => {
+ ctx.reset_fill_color_space();
+
+ let [c, m, y, k] = ColorSpace::Cmyk.encode(*self);
+ ctx.content.set_fill_cmyk(c, m, y, k);
+ }
+ Color::Hsl(_) => {
+ ctx.parent.colors.hsl(&mut ctx.parent.alloc);
+ ctx.set_fill_color_space(HSL);
+
+ let [h, s, l, _] = ColorSpace::Hsl.encode(*self);
+ ctx.content.set_fill_color([h, s, l]);
+ }
+ Color::Hsv(_) => {
+ ctx.parent.colors.hsv(&mut ctx.parent.alloc);
+ ctx.set_fill_color_space(HSV);
+
+ let [h, s, v, _] = ColorSpace::Hsv.encode(*self);
+ ctx.content.set_fill_color([h, s, v]);
+ }
+ }
+ }
+
+ fn set_as_stroke(&self, ctx: &mut PageContext, _: bool, _: Transforms) {
+ match self {
+ Color::Luma(_) => {
+ ctx.parent.colors.d65_gray(&mut ctx.parent.alloc);
+ ctx.set_stroke_color_space(D65_GRAY);
+
+ let [l, _, _, _] = ColorSpace::D65Gray.encode(*self);
+ ctx.content.set_stroke_color([l]);
+ }
+ Color::Oklab(_) => {
+ ctx.parent.colors.oklab(&mut ctx.parent.alloc);
+ ctx.set_stroke_color_space(OKLAB);
+
+ let [l, a, b, _] = ColorSpace::Oklab.encode(*self);
+ ctx.content.set_stroke_color([l, a, b]);
+ }
+ Color::LinearRgb(_) => {
+ ctx.parent.colors.linear_rgb();
+ ctx.set_stroke_color_space(LINEAR_SRGB);
+
+ let [r, g, b, _] = ColorSpace::LinearRgb.encode(*self);
+ ctx.content.set_stroke_color([r, g, b]);
+ }
+ Color::Rgba(_) => {
+ ctx.parent.colors.srgb(&mut ctx.parent.alloc);
+ ctx.set_stroke_color_space(SRGB);
+
+ let [r, g, b, _] = ColorSpace::Srgb.encode(*self);
+ ctx.content.set_stroke_color([r, g, b]);
+ }
+ Color::Cmyk(_) => {
+ ctx.reset_stroke_color_space();
+
+ let [c, m, y, k] = ColorSpace::Cmyk.encode(*self);
+ ctx.content.set_stroke_cmyk(c, m, y, k);
+ }
+ Color::Hsl(_) => {
+ ctx.parent.colors.hsl(&mut ctx.parent.alloc);
+ ctx.set_stroke_color_space(HSL);
+
+ let [h, s, l, _] = ColorSpace::Hsl.encode(*self);
+ ctx.content.set_stroke_color([h, s, l]);
+ }
+ Color::Hsv(_) => {
+ ctx.parent.colors.hsv(&mut ctx.parent.alloc);
+ ctx.set_stroke_color_space(HSV);
+
+ let [h, s, v, _] = ColorSpace::Hsv.encode(*self);
+ ctx.content.set_stroke_color([h, s, v]);
+ }
+ }
+ }
+}
+
+/// Extra color space functions.
+pub(super) trait ColorSpaceExt {
+ /// Returns the range of the color space.
+ fn range(self) -> [f32; 6];
+
+ /// Converts a color to the color space.
+ fn convert<U: QuantizedColor>(self, color: Color) -> [U; 3];
+}
+
+impl ColorSpaceExt for ColorSpace {
+ fn range(self) -> [f32; 6] {
+ [0.0, 1.0, 0.0, 1.0, 0.0, 1.0]
+ }
+
+ fn convert<U: QuantizedColor>(self, color: Color) -> [U; 3] {
+ let range = self.range();
+ let [x, y, z, _] = color.to_space(self).to_vec4();
+
+ // We need to add 0.4 to y and z for Oklab
+ // This is because DeviceN color spaces in PDF can **only** be in
+ // the range 0..1 and some readers enforce that.
+ // The oklab color space is in the range -0.4..0.4
+ // Also map the angle range of HSV/HSL to 0..1 instead of 0..360
+ let [x, y, z] = match self {
+ Self::Oklab => [x, y + 0.4, z + 0.4],
+ Self::Hsv | Self::Hsl => [x / 360.0, y, z],
+ _ => [x, y, z],
+ };
+
+ [
+ U::quantize(x, [range[0], range[1]]),
+ U::quantize(y, [range[2], range[3]]),
+ U::quantize(z, [range[4], range[5]]),
+ ]
+ }
+}
+
+/// Quantizes a color component to a specific type.
+pub(super) trait QuantizedColor {
+ fn quantize(color: f32, range: [f32; 2]) -> Self;
+}
+
+impl QuantizedColor for u16 {
+ fn quantize(color: f32, range: [f32; 2]) -> Self {
+ let value = (color - range[0]) / (range[1] - range[0]);
+ (value.max(0.0).min(1.0) * Self::MAX as f32)
+ .round()
+ .max(0.0)
+ .min(Self::MAX as f32) as Self
+ }
+}
+
+impl QuantizedColor for f32 {
+ fn quantize(color: f32, [min, max]: [f32; 2]) -> Self {
+ color.clamp(min, max)
+ }
+}
diff --git a/crates/typst-pdf/src/extg.rs b/crates/typst-pdf/src/extg.rs
new file mode 100644
index 00000000..f7bd116d
--- /dev/null
+++ b/crates/typst-pdf/src/extg.rs
@@ -0,0 +1,35 @@
+use super::PdfContext;
+
+/// A PDF external graphics state.
+#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
+pub struct ExtGState {
+ // In the range 0-255, needs to be divided before being written into the graphics state!
+ pub stroke_opacity: u8,
+ // In the range 0-255, needs to be divided before being written into the graphics state!
+ pub fill_opacity: u8,
+}
+
+impl Default for ExtGState {
+ fn default() -> Self {
+ Self { stroke_opacity: 255, fill_opacity: 255 }
+ }
+}
+
+impl ExtGState {
+ pub fn uses_opacities(&self) -> bool {
+ self.stroke_opacity != 255 || self.fill_opacity != 255
+ }
+}
+
+/// Embed all used external graphics states into the PDF.
+#[tracing::instrument(skip_all)]
+pub fn write_external_graphics_states(ctx: &mut PdfContext) {
+ for external_gs in ctx.extg_map.items() {
+ let id = ctx.alloc.bump();
+ ctx.ext_gs_refs.push(id);
+ ctx.pdf
+ .ext_graphics(id)
+ .non_stroking_alpha(external_gs.fill_opacity as f32 / 255.0)
+ .stroking_alpha(external_gs.stroke_opacity as f32 / 255.0);
+ }
+}
diff --git a/crates/typst-pdf/src/font.rs b/crates/typst-pdf/src/font.rs
new file mode 100644
index 00000000..ccf2f403
--- /dev/null
+++ b/crates/typst-pdf/src/font.rs
@@ -0,0 +1,268 @@
+use std::collections::BTreeMap;
+use std::sync::Arc;
+
+use ecow::{eco_format, EcoString};
+use pdf_writer::types::{CidFontType, FontFlags, SystemInfo, UnicodeCmap};
+use pdf_writer::{Filter, Finish, Name, Rect, Str};
+use ttf_parser::{name_id, GlyphId, Tag};
+use typst::font::Font;
+use typst::util::SliceExt;
+use unicode_properties::{GeneralCategory, UnicodeGeneralCategory};
+
+use crate::{deflate, EmExt, PdfContext};
+
+const CFF: Tag = Tag::from_bytes(b"CFF ");
+const CFF2: Tag = Tag::from_bytes(b"CFF2");
+const CMAP_NAME: Name = Name(b"Custom");
+const SYSTEM_INFO: SystemInfo = SystemInfo {
+ registry: Str(b"Adobe"),
+ ordering: Str(b"Identity"),
+ supplement: 0,
+};
+
+/// Embed all used fonts into the PDF.
+#[tracing::instrument(skip_all)]
+pub fn write_fonts(ctx: &mut PdfContext) {
+ for font in ctx.font_map.items() {
+ let type0_ref = ctx.alloc.bump();
+ let cid_ref = ctx.alloc.bump();
+ let descriptor_ref = ctx.alloc.bump();
+ let cmap_ref = ctx.alloc.bump();
+ let data_ref = ctx.alloc.bump();
+ ctx.font_refs.push(type0_ref);
+
+ let glyph_set = ctx.glyph_sets.get_mut(font).unwrap();
+ let metrics = font.metrics();
+ let ttf = font.ttf();
+
+ // Do we have a TrueType or CFF font?
+ //
+ // FIXME: CFF2 must be handled differently and requires PDF 2.0
+ // (or we have to convert it to CFF).
+ let is_cff = ttf
+ .raw_face()
+ .table(CFF)
+ .or_else(|| ttf.raw_face().table(CFF2))
+ .is_some();
+
+ let postscript_name = font
+ .find_name(name_id::POST_SCRIPT_NAME)
+ .unwrap_or_else(|| "unknown".to_string());
+
+ let subset_tag = subset_tag(glyph_set);
+ let base_font = eco_format!("{subset_tag}+{postscript_name}");
+ let base_font_type0 = if is_cff {
+ eco_format!("{base_font}-Identity-H")
+ } else {
+ base_font.clone()
+ };
+
+ // Write the base font object referencing the CID font.
+ ctx.pdf
+ .type0_font(type0_ref)
+ .base_font(Name(base_font_type0.as_bytes()))
+ .encoding_predefined(Name(b"Identity-H"))
+ .descendant_font(cid_ref)
+ .to_unicode(cmap_ref);
+
+ // Write the CID font referencing the font descriptor.
+ let mut cid = ctx.pdf.cid_font(cid_ref);
+ cid.subtype(if is_cff { CidFontType::Type0 } else { CidFontType::Type2 });
+ cid.base_font(Name(base_font.as_bytes()));
+ cid.system_info(SYSTEM_INFO);
+ cid.font_descriptor(descriptor_ref);
+ cid.default_width(0.0);
+ if !is_cff {
+ cid.cid_to_gid_map_predefined(Name(b"Identity"));
+ }
+
+ // Extract the widths of all glyphs.
+ let mut widths = vec![];
+ for gid in std::iter::once(0).chain(glyph_set.keys().copied()) {
+ let width = ttf.glyph_hor_advance(GlyphId(gid)).unwrap_or(0);
+ let units = font.to_em(width).to_font_units();
+ let cid = glyph_cid(font, gid);
+ if usize::from(cid) >= widths.len() {
+ widths.resize(usize::from(cid) + 1, 0.0);
+ widths[usize::from(cid)] = units;
+ }
+ }
+
+ // Write all non-zero glyph widths.
+ let mut first = 0;
+ let mut width_writer = cid.widths();
+ for (w, group) in widths.group_by_key(|&w| w) {
+ let end = first + group.len();
+ if w != 0.0 {
+ let last = end - 1;
+ width_writer.same(first as u16, last as u16, w);
+ }
+ first = end;
+ }
+
+ width_writer.finish();
+ cid.finish();
+
+ let mut flags = FontFlags::empty();
+ flags.set(FontFlags::SERIF, postscript_name.contains("Serif"));
+ flags.set(FontFlags::FIXED_PITCH, ttf.is_monospaced());
+ flags.set(FontFlags::ITALIC, ttf.is_italic());
+ flags.insert(FontFlags::SYMBOLIC);
+ flags.insert(FontFlags::SMALL_CAP);
+
+ let global_bbox = ttf.global_bounding_box();
+ let bbox = Rect::new(
+ font.to_em(global_bbox.x_min).to_font_units(),
+ font.to_em(global_bbox.y_min).to_font_units(),
+ font.to_em(global_bbox.x_max).to_font_units(),
+ font.to_em(global_bbox.y_max).to_font_units(),
+ );
+
+ let italic_angle = ttf.italic_angle().unwrap_or(0.0);
+ let ascender = metrics.ascender.to_font_units();
+ let descender = metrics.descender.to_font_units();
+ let cap_height = metrics.cap_height.to_font_units();
+ let stem_v = 10.0 + 0.244 * (f32::from(ttf.weight().to_number()) - 50.0);
+
+ // Write the font descriptor (contains metrics about the font).
+ let mut font_descriptor = ctx.pdf.font_descriptor(descriptor_ref);
+ font_descriptor
+ .name(Name(base_font.as_bytes()))
+ .flags(flags)
+ .bbox(bbox)
+ .italic_angle(italic_angle)
+ .ascent(ascender)
+ .descent(descender)
+ .cap_height(cap_height)
+ .stem_v(stem_v);
+
+ if is_cff {
+ font_descriptor.font_file3(data_ref);
+ } else {
+ font_descriptor.font_file2(data_ref);
+ }
+
+ font_descriptor.finish();
+
+ // Write the /ToUnicode character map, which maps glyph ids back to
+ // unicode codepoints to enable copying out of the PDF.
+ let cmap = create_cmap(ttf, glyph_set);
+ ctx.pdf.cmap(cmap_ref, &cmap.finish());
+
+ // Subset and write the font's bytes.
+ let glyphs: Vec<_> = glyph_set.keys().copied().collect();
+ let data = subset_font(font, &glyphs);
+
+ let mut stream = ctx.pdf.stream(data_ref, &data);
+ stream.filter(Filter::FlateDecode);
+ if is_cff {
+ stream.pair(Name(b"Subtype"), Name(b"CIDFontType0C"));
+ }
+
+ stream.finish();
+ }
+}
+
+/// Subset a font to the given glyphs.
+///
+/// - For a font with TrueType outlines, this returns the whole OpenType font.
+/// - For a font with CFF outlines, this returns just the CFF font program.
+#[comemo::memoize]
+fn subset_font(font: &Font, glyphs: &[u16]) -> Arc<Vec<u8>> {
+ let data = font.data();
+ let profile = subsetter::Profile::pdf(glyphs);
+ let subsetted = subsetter::subset(data, font.index(), profile);
+ let mut data = subsetted.as_deref().unwrap_or(data);
+
+ // Extract the standalone CFF font program if applicable.
+ let raw = ttf_parser::RawFace::parse(data, 0).unwrap();
+ if let Some(cff) = raw.table(CFF) {
+ data = cff;
+ }
+
+ Arc::new(deflate(data))
+}
+
+/// Produce a unique 6 letter tag for a glyph set.
+fn subset_tag(glyphs: &BTreeMap<u16, EcoString>) -> EcoString {
+ const LEN: usize = 6;
+ const BASE: u128 = 26;
+ let mut hash = typst::util::hash128(&glyphs);
+ let mut letter = [b'A'; LEN];
+ for l in letter.iter_mut() {
+ *l = b'A' + (hash % BASE) as u8;
+ hash /= BASE;
+ }
+ std::str::from_utf8(&letter).unwrap().into()
+}
+
+/// Create a /ToUnicode CMap.
+fn create_cmap(
+ ttf: &ttf_parser::Face,
+ glyph_set: &mut BTreeMap<u16, EcoString>,
+) -> UnicodeCmap {
+ // For glyphs that have codepoints mapping to them in the font's cmap table,
+ // we prefer them over pre-existing text mappings from the document. Only
+ // things that don't have a corresponding codepoint (or only a private-use
+ // one) like the "Th" in Linux Libertine get the text of their first
+ // occurrences in the document instead.
+ for subtable in ttf.tables().cmap.into_iter().flat_map(|table| table.subtables) {
+ if !subtable.is_unicode() {
+ continue;
+ }
+
+ subtable.codepoints(|n| {
+ let Some(c) = std::char::from_u32(n) else { return };
+ if c.general_category() == GeneralCategory::PrivateUse {
+ return;
+ }
+
+ let Some(GlyphId(g)) = ttf.glyph_index(c) else { return };
+ if glyph_set.contains_key(&g) {
+ glyph_set.insert(g, c.into());
+ }
+ });
+ }
+
+ // Produce a reverse mapping from glyphs to unicode strings.
+ let mut cmap = UnicodeCmap::new(CMAP_NAME, SYSTEM_INFO);
+ for (&g, text) in glyph_set.iter() {
+ if !text.is_empty() {
+ cmap.pair_with_multiple(g, text.chars());
+ }
+ }
+
+ cmap
+}
+
+/// Get the CID for a glyph id.
+///
+/// When writing text into a PDF, we have to specify CIDs (character ids) not
+/// GIDs (glyph IDs).
+///
+/// Most of the time, the mapping between these two is an identity mapping. In
+/// particular, for TrueType fonts, the mapping is an identity mapping because
+/// of this line above:
+/// ```ignore
+/// cid.cid_to_gid_map_predefined(Name(b"Identity"));
+/// ```
+///
+/// However, CID-keyed CFF fonts may have a non-identity mapping defined in
+/// their charset. For those, we must map the glyph IDs in a `TextItem` to CIDs.
+/// The font defines the map through its charset. The charset usually maps
+/// glyphs to SIDs (string ids) specifying the glyph's name. Not for CID-keyed
+/// fonts though! For these, the SIDs are CIDs in disguise. Relevant quote from
+/// the CFF spec:
+///
+/// > The charset data, although in the same format as non-CIDFonts, will
+/// > represent CIDs rather than SIDs, [...]
+///
+/// This function performs the mapping from glyph ID to CID. It also works for
+/// non CID-keyed fonts. Then, it will simply return the glyph ID.
+pub(super) fn glyph_cid(font: &Font, glyph_id: u16) -> u16 {
+ font.ttf()
+ .tables()
+ .cff
+ .and_then(|cff| cff.glyph_cid(ttf_parser::GlyphId(glyph_id)))
+ .unwrap_or(glyph_id)
+}
diff --git a/crates/typst-pdf/src/gradient.rs b/crates/typst-pdf/src/gradient.rs
new file mode 100644
index 00000000..e5cae30e
--- /dev/null
+++ b/crates/typst-pdf/src/gradient.rs
@@ -0,0 +1,581 @@
+use std::f32::consts::{PI, TAU};
+use std::sync::Arc;
+
+use ecow::{eco_format, EcoString};
+use pdf_writer::types::FunctionShadingType;
+use pdf_writer::writers::StreamShadingType;
+use pdf_writer::{types::ColorSpaceOperand, Name};
+use pdf_writer::{Filter, Finish, Ref};
+use typst::geom::{
+ Abs, Angle, Color, ColorSpace, ConicGradient, Gradient, Numeric, Point, Quadrant,
+ Ratio, Relative, Transform, WeightedColor,
+};
+
+use crate::color::{ColorSpaceExt, PaintEncode, QuantizedColor};
+use crate::page::{PageContext, Transforms};
+use crate::{deflate, AbsExt, PdfContext};
+
/// A unique-transform-aspect-ratio combination that will be encoded into the
/// PDF.
///
/// Serves as the deduplication key in the gradient map: two uses of the same
/// gradient with the same transform, aspect ratio and text flag share a
/// single shading pattern.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct PdfGradient {
    /// The transform to apply to the gradient.
    pub transform: Transform,
    /// The aspect ratio of the gradient.
    /// Required for aspect ratio correction.
    pub aspect_ratio: Ratio,
    /// The gradient.
    pub gradient: Gradient,
    /// Whether the gradient is applied to text.
    /// Text uses a flipped y-axis, which the pattern must compensate for.
    pub on_text: bool,
}
+
/// Writes the actual gradients (shading patterns) to the PDF.
/// This is performed once after writing all pages.
pub fn write_gradients(ctx: &mut PdfContext) {
    // Clone the deduplicated gradients out of the map so that `ctx` can be
    // borrowed mutably while writing.
    for PdfGradient { transform, aspect_ratio, gradient, on_text } in
        ctx.gradient_map.items().cloned().collect::<Vec<_>>()
    {
        let shading = ctx.alloc.bump();
        ctx.gradient_refs.push(shading);

        let mut shading_pattern = match &gradient {
            // Linear gradients map onto axial function-based shadings.
            Gradient::Linear(linear) => {
                let shading_function = shading_function(ctx, &gradient);
                let mut shading_pattern = ctx.pdf.shading_pattern(shading);
                let mut shading = shading_pattern.function_shading();
                shading.shading_type(FunctionShadingType::Axial);

                ctx.colors
                    .write(gradient.space(), shading.color_space(), &mut ctx.alloc);

                let angle = Gradient::correct_aspect_ratio(linear.angle, aspect_ratio);
                let (sin, cos) = (angle.sin(), angle.cos());
                // Projection of the unit square onto the gradient axis, so
                // the axis spans the whole (rotated) shape.
                let length = sin.abs() + cos.abs();

                shading
                    .anti_alias(gradient.anti_alias())
                    .function(shading_function)
                    .coords([0.0, 0.0, length as f32, 0.0])
                    .extend([true; 2]);

                shading.finish();

                shading_pattern
            }
            // Radial gradients map onto radial function-based shadings.
            Gradient::Radial(radial) => {
                let shading_function = shading_function(ctx, &gradient);
                let mut shading_pattern = ctx.pdf.shading_pattern(shading);
                let mut shading = shading_pattern.function_shading();
                shading.shading_type(FunctionShadingType::Radial);

                ctx.colors
                    .write(gradient.space(), shading.color_space(), &mut ctx.alloc);

                // Coordinates: focal circle (start), then outer circle (end).
                shading
                    .anti_alias(gradient.anti_alias())
                    .function(shading_function)
                    .coords([
                        radial.focal_center.x.get() as f32,
                        radial.focal_center.y.get() as f32,
                        radial.focal_radius.get() as f32,
                        radial.center.x.get() as f32,
                        radial.center.y.get() as f32,
                        radial.radius.get() as f32,
                    ])
                    .extend([true; 2]);

                shading.finish();

                shading_pattern
            }
            // Conic gradients have no native PDF shading type; they are
            // approximated by a deflated stream of Coons patches.
            Gradient::Conic(conic) => {
                let vertices = compute_vertex_stream(conic, aspect_ratio, on_text);

                let stream_shading_id = ctx.alloc.bump();
                let mut stream_shading =
                    ctx.pdf.stream_shading(stream_shading_id, &vertices);

                ctx.colors.write(
                    conic.space,
                    stream_shading.color_space(),
                    &mut ctx.alloc,
                );

                // Decode: coordinates span the unit square, colors span the
                // color space's component ranges.
                let range = conic.space.range();
                stream_shading
                    .bits_per_coordinate(16)
                    .bits_per_component(16)
                    .bits_per_flag(8)
                    .shading_type(StreamShadingType::CoonsPatch)
                    .decode([
                        0.0, 1.0, 0.0, 1.0, range[0], range[1], range[2], range[3],
                        range[4], range[5],
                    ])
                    .anti_alias(gradient.anti_alias())
                    .filter(Filter::FlateDecode);

                stream_shading.finish();

                let mut shading_pattern = ctx.pdf.shading_pattern(shading);
                shading_pattern.shading_ref(stream_shading_id);
                shading_pattern
            }
        };

        // The pattern matrix maps the unit gradient square onto the shape.
        shading_pattern.matrix(transform_to_array(transform));
    }
}
+
/// Writes an exponential or stitched function that expresses the gradient.
fn shading_function(ctx: &mut PdfContext, gradient: &Gradient) -> Ref {
    let function = ctx.alloc.bump();
    let mut functions = vec![];
    let mut bounds = vec![];
    let mut encode = vec![];

    // Create the individual gradient functions for each pair of stops.
    for window in gradient.stops_ref().windows(2) {
        let (first, second) = (window[0], window[1]);

        // Skip stops with the same position.
        if first.1.get() == second.1.get() {
            continue;
        }

        // If the color space is HSL or HSV, and we cross the 0°/360° boundary,
        // we need to create two separate stops.
        if gradient.space() == ColorSpace::Hsl || gradient.space() == ColorSpace::Hsv {
            let t1 = first.1.get() as f32;
            let t2 = second.1.get() as f32;
            let [h1, s1, x1, _] = first.0.to_space(gradient.space()).to_vec4();
            let [h2, s2, x2, _] = second.0.to_space(gradient.space()).to_vec4();

            // Compute the intermediary stop at 360°.
            if (h1 - h2).abs() > 180.0 {
                // NOTE(review): the second condition compares against the
                // already rebound `h1`, so both hues can end up shifted when
                // `h1 < h2` initially — confirm this unwrapping is intended.
                let h1 = if h1 < h2 { h1 + 360.0 } else { h1 };
                let h2 = if h2 < h1 { h2 + 360.0 } else { h2 };

                // We compute where the crossing happens between zero and one
                let t = (360.0 - h1) / (h2 - h1);
                // We then map it back to the original range.
                let t_prime = t * (t2 - t1) + t1;

                // If the crossing happens between the two stops,
                // we need to create an extra stop.
                if t_prime <= t2 && t_prime >= t1 {
                    // Three sub-segments: up to the crossing, a zero-width
                    // hue wrap at the crossing, and onward to the second stop.
                    bounds.push(t_prime);
                    bounds.push(t_prime);
                    bounds.push(t2);
                    encode.extend([0.0, 1.0]);
                    encode.extend([0.0, 1.0]);
                    encode.extend([0.0, 1.0]);

                    // These need to be individual function to encode 360.0 correctly.
                    // Segment 1: ramp the hue up to 360° (encoded as 1.0).
                    let func1 = ctx.alloc.bump();
                    ctx.pdf
                        .exponential_function(func1)
                        .range(gradient.space().range())
                        .c0(gradient.space().convert(first.0))
                        .c1([1.0, s1 * (1.0 - t) + s2 * t, x1 * (1.0 - t) + x2 * t])
                        .domain([0.0, 1.0])
                        .n(1.0);

                    // Segment 2: jump the hue from 360° back to 0° while
                    // keeping the other components fixed.
                    let func2 = ctx.alloc.bump();
                    ctx.pdf
                        .exponential_function(func2)
                        .range(gradient.space().range())
                        .c0([1.0, s1 * (1.0 - t) + s2 * t, x1 * (1.0 - t) + x2 * t])
                        .c1([0.0, s1 * (1.0 - t) + s2 * t, x1 * (1.0 - t) + x2 * t])
                        .domain([0.0, 1.0])
                        .n(1.0);

                    // Segment 3: ramp from 0° to the second stop's color.
                    let func3 = ctx.alloc.bump();
                    ctx.pdf
                        .exponential_function(func3)
                        .range(gradient.space().range())
                        .c0([0.0, s1 * (1.0 - t) + s2 * t, x1 * (1.0 - t) + x2 * t])
                        .c1(gradient.space().convert(second.0))
                        .domain([0.0, 1.0])
                        .n(1.0);

                    functions.push(func1);
                    functions.push(func2);
                    functions.push(func3);

                    continue;
                }
            }
        }

        // Regular case: one exponential segment per pair of stops.
        bounds.push(second.1.get() as f32);
        functions.push(single_gradient(ctx, first.0, second.0, gradient.space()));
        encode.extend([0.0, 1.0]);
    }

    // Special case for gradients with only two stops.
    if functions.len() == 1 {
        return functions[0];
    }

    // Remove the last bound, since it's not needed for the stitching function.
    bounds.pop();

    // Create the stitching function.
    ctx.pdf
        .stitching_function(function)
        .domain([0.0, 1.0])
        .range(gradient.space().range())
        .functions(functions)
        .bounds(bounds)
        .encode(encode);

    function
}
+
+/// Writes an expontential function that expresses a single segment (between two
+/// stops) of a gradient.
+fn single_gradient(
+ ctx: &mut PdfContext,
+ first_color: Color,
+ second_color: Color,
+ color_space: ColorSpace,
+) -> Ref {
+ let reference = ctx.alloc.bump();
+
+ ctx.pdf
+ .exponential_function(reference)
+ .range(color_space.range())
+ .c0(color_space.convert(first_color))
+ .c1(color_space.convert(second_color))
+ .domain([0.0, 1.0])
+ .n(1.0);
+
+ reference
+}
+
+impl PaintEncode for Gradient {
+ fn set_as_fill(&self, ctx: &mut PageContext, on_text: bool, transforms: Transforms) {
+ ctx.reset_fill_color_space();
+
+ let id = register_gradient(ctx, self, on_text, transforms);
+ let name = Name(id.as_bytes());
+
+ ctx.content.set_fill_color_space(ColorSpaceOperand::Pattern);
+ ctx.content.set_fill_pattern(None, name);
+ }
+
+ fn set_as_stroke(
+ &self,
+ ctx: &mut PageContext,
+ on_text: bool,
+ transforms: Transforms,
+ ) {
+ ctx.reset_stroke_color_space();
+
+ let id = register_gradient(ctx, self, on_text, transforms);
+ let name = Name(id.as_bytes());
+
+ ctx.content.set_stroke_color_space(ColorSpaceOperand::Pattern);
+ ctx.content.set_stroke_pattern(None, name);
+ }
+}
+
/// Deduplicates a gradient to a named PDF resource.
///
/// Returns the resource name ("Gr0", "Gr1", ...) under which the pattern is
/// registered on the page.
fn register_gradient(
    ctx: &mut PageContext,
    gradient: &Gradient,
    on_text: bool,
    mut transforms: Transforms,
) -> EcoString {
    // Edge cases for strokes.
    // A zero-sized axis would yield a degenerate pattern matrix, so clamp it
    // to one point.
    if transforms.size.x.is_zero() {
        transforms.size.x = Abs::pt(1.0);
    }

    if transforms.size.y.is_zero() {
        transforms.size.y = Abs::pt(1.0);
    }

    // Size the gradient relative to the item itself or its parent container.
    let size = match gradient.unwrap_relative(on_text) {
        Relative::Self_ => transforms.size,
        Relative::Parent => transforms.container_size,
    };

    // Correction for y-axis flipping on text.
    let angle = gradient.angle().unwrap_or_else(Angle::zero);
    let angle = if on_text { Angle::rad(TAU as f64) - angle } else { angle };

    // Origin offset: conic gradients are positioned by their configured
    // center; other kinds are anchored by the quadrant the angle points into.
    let (offset_x, offset_y) = match gradient {
        Gradient::Conic(conic) => (
            -size.x * (1.0 - conic.center.x.get() / 2.0) / 2.0,
            -size.y * (1.0 - conic.center.y.get() / 2.0) / 2.0,
        ),
        _ => match angle.quadrant() {
            Quadrant::First => (Abs::zero(), Abs::zero()),
            Quadrant::Second => (size.x, Abs::zero()),
            Quadrant::Third => (size.x, size.y),
            Quadrant::Fourth => (Abs::zero(), size.y),
        },
    };

    // Conic gradients bake their rotation into the vertex stream instead of
    // the pattern matrix.
    let rotation = match gradient {
        Gradient::Conic(_) => Angle::zero(),
        _ => angle,
    };

    let transform = match gradient.unwrap_relative(on_text) {
        Relative::Self_ => transforms.transform,
        Relative::Parent => transforms.container_transform,
    };

    // Conic gradients are drawn on a 4x-scaled canvas.
    // NOTE(review): presumably to push the mesh approximation's edges outside
    // the visible area — confirm.
    let scale_offset = match gradient {
        Gradient::Conic(_) => 4.0_f64,
        _ => 1.0,
    };

    // Pattern matrix: translate to the anchor, scale the unit gradient square
    // to the target size, then apply the aspect-corrected rotation.
    let pdf_gradient = PdfGradient {
        aspect_ratio: size.aspect_ratio(),
        transform: transform
            .pre_concat(Transform::translate(
                offset_x * scale_offset,
                offset_y * scale_offset,
            ))
            .pre_concat(Transform::scale(
                Ratio::new(size.x.to_pt() * scale_offset),
                Ratio::new(size.y.to_pt() * scale_offset),
            ))
            .pre_concat(Transform::rotate(Gradient::correct_aspect_ratio(
                rotation,
                size.aspect_ratio(),
            ))),
        gradient: gradient.clone(),
        on_text,
    };

    // Deduplicate via the document-wide map and derive the name from the
    // assigned index.
    let index = ctx.parent.gradient_map.insert(pdf_gradient);
    eco_format!("Gr{}", index)
}
+
+/// Convert to an array of floats.
+fn transform_to_array(ts: Transform) -> [f32; 6] {
+ [
+ ts.sx.get() as f32,
+ ts.ky.get() as f32,
+ ts.kx.get() as f32,
+ ts.sy.get() as f32,
+ ts.tx.to_f32(),
+ ts.ty.to_f32(),
+ ]
+}
+
/// Writes a single Coons Patch as defined in the PDF specification
/// to a binary vec.
///
/// Structure:
/// - flag: `u8`
/// - points: `[u16; 24]`
/// - colors: `[u16; 12]`
///
/// The patch covers the circle-sector between the angles corresponding to the
/// gradient positions `t` and `t1` (fractions of a full turn), colored `c0`
/// at the start edge and `c1` at the end edge.
fn write_patch(
    target: &mut Vec<u8>,
    t: f32,
    t1: f32,
    c0: [u16; 3],
    c1: [u16; 3],
    angle: Angle,
    on_text: bool,
) {
    // Convert the gradient positions into absolute angles.
    let mut theta = -TAU * t + angle.to_rad() as f32 + PI;
    let mut theta1 = -TAU * t1 + angle.to_rad() as f32 + PI;

    // Correction for y-axis flipping on text.
    if on_text {
        theta = (TAU - theta).rem_euclid(TAU);
        theta1 = (TAU - theta1).rem_euclid(TAU);
    }

    // Bézier control points approximating the arc between the two angles on
    // the circle of radius 0.5 centered in the unit square.
    let (cp1, cp2) =
        control_point(Point::new(Abs::pt(0.5), Abs::pt(0.5)), 0.5, theta, theta1);

    // Push the flag
    target.push(0);

    // The sector's apex: the center of the unit square. All coordinates are
    // quantized to big-endian u16 as declared by the caller's shading stream.
    let p1 =
        [u16::quantize(0.5, [0.0, 1.0]).to_be(), u16::quantize(0.5, [0.0, 1.0]).to_be()];

    // Arc endpoints. Quantizing cos/sin over [-1, 1] while the caller's
    // decode array spans [0, 1] effectively maps them to 0.5 + 0.5 * cos/sin,
    // i.e. onto the circle inscribed in the unit square.
    let p2 = [
        u16::quantize(theta.cos(), [-1.0, 1.0]).to_be(),
        u16::quantize(theta.sin(), [-1.0, 1.0]).to_be(),
    ];

    let p3 = [
        u16::quantize(theta1.cos(), [-1.0, 1.0]).to_be(),
        u16::quantize(theta1.sin(), [-1.0, 1.0]).to_be(),
    ];

    let cp1 = [
        u16::quantize(cp1.x.to_f32(), [0.0, 1.0]).to_be(),
        u16::quantize(cp1.y.to_f32(), [0.0, 1.0]).to_be(),
    ];

    let cp2 = [
        u16::quantize(cp2.x.to_f32(), [0.0, 1.0]).to_be(),
        u16::quantize(cp2.y.to_f32(), [0.0, 1.0]).to_be(),
    ];

    // Push the points
    target.extend_from_slice(bytemuck::cast_slice(&[
        p1, p1, p2, p2, cp1, cp2, p3, p3, p1, p1, p1, p1,
    ]));

    // Corner colors (start color twice, end color twice), big-endian.
    let colors =
        [c0.map(u16::to_be), c0.map(u16::to_be), c1.map(u16::to_be), c1.map(u16::to_be)];

    // Push the colors.
    target.extend_from_slice(bytemuck::cast_slice(&colors));
}
+
/// Computes the two cubic Bézier control points that approximate the circular
/// arc of radius `r` around `c` from `angle_start` to `angle_end` (radians).
///
/// Returns `(p1, p2)`: the control point leaving the start of the arc and the
/// one entering its end.
// NOTE(review): `f` is a variant of the classic `4/3 * tan(delta/4)` arc
// constant — it matches it exactly for quarter-turn arcs; confirm accuracy
// for other sweep sizes.
fn control_point(c: Point, r: f32, angle_start: f32, angle_end: f32) -> (Point, Point) {
    // How many arcs of this sweep fit into a full turn.
    let n = (TAU / (angle_end - angle_start)).abs();
    let f = ((angle_end - angle_start) / n).tan() * 4.0 / 3.0;

    // Tangent offsets from the arc endpoints.
    let p1 = c + Point::new(
        Abs::pt((r * angle_start.cos() - f * r * angle_start.sin()) as f64),
        Abs::pt((r * angle_start.sin() + f * r * angle_start.cos()) as f64),
    );

    let p2 = c + Point::new(
        Abs::pt((r * angle_end.cos() + f * r * angle_end.sin()) as f64),
        Abs::pt((r * angle_end.sin() - f * r * angle_end.cos()) as f64),
    );

    (p1, p2)
}
+
/// Computes the deflated Coons-patch vertex stream that approximates a conic
/// gradient. Memoized, since the same gradient may appear on many pages.
#[comemo::memoize]
fn compute_vertex_stream(
    conic: &ConicGradient,
    aspect_ratio: Ratio,
    on_text: bool,
) -> Arc<Vec<u8>> {
    // Generated vertices for the Coons patches
    let mut vertices = Vec::new();

    // Correct the gradient's angle
    let angle = Gradient::correct_aspect_ratio(conic.angle, aspect_ratio);

    // We want to generate a vertex based on some conditions, either:
    // - At the boundary of a stop
    // - At the boundary of a quadrant
    // - When we cross the boundary of a hue turn (for HSV and HSL only)
    for window in conic.stops.windows(2) {
        let ((c0, t0), (c1, t1)) = (window[0], window[1]);

        // Skip stops with the same position
        if t0 == t1 {
            continue;
        }

        // If the angle between the two stops is greater than 90 degrees, we need to
        // generate a vertex at the boundary of the quadrant.
        // However, we add more stops in-between to make the gradient smoother, so we
        // need to generate a vertex at least every 5 degrees.
        // If the colors are the same, we do it every quadrant only.
        let slope = 1.0 / (t1.get() - t0.get());
        let mut t_x = t0.get();
        let dt = (t1.get() - t0.get()).min(0.25);
        while t_x < t1.get() {
            let t_next = (t_x + dt).min(t1.get());

            // Normalized positions of the sub-segment within [t0, t1].
            // Note: these shadow the outer `t1` for the rest of this
            // iteration.
            let t1 = slope * (t_x - t0.get());
            let t2 = slope * (t_next - t0.get());

            // We don't use `Gradient::sample` to avoid issues with sharp gradients.
            let c = Color::mix_iter(
                [WeightedColor::new(c0, 1.0 - t1), WeightedColor::new(c1, t1)],
                conic.space,
            )
            .unwrap();

            let c_next = Color::mix_iter(
                [WeightedColor::new(c0, 1.0 - t2), WeightedColor::new(c1, t2)],
                conic.space,
            )
            .unwrap();

            // If the color space is HSL or HSV, and we cross the 0°/360° boundary,
            // we need to create two separate stops.
            if conic.space == ColorSpace::Hsl || conic.space == ColorSpace::Hsv {
                let [h1, s1, x1, _] = c.to_space(conic.space).to_vec4();
                let [h2, s2, x2, _] = c_next.to_space(conic.space).to_vec4();

                // Compute the intermediary stop at 360°.
                if (h1 - h2).abs() > 180.0 {
                    let h1 = if h1 < h2 { h1 + 360.0 } else { h1 };
                    let h2 = if h2 < h1 { h2 + 360.0 } else { h2 };

                    // We compute where the crossing happens between zero and one
                    let t = (360.0 - h1) / (h2 - h1);
                    // We then map it back to the original range.
                    let t_prime = t * (t_next as f32 - t_x as f32) + t_x as f32;

                    // If the crossing happens between the two stops,
                    // we need to create an extra stop.
                    if t_prime <= t_next as f32 && t_prime >= t_x as f32 {
                        // Color at the crossing, once with hue 360° (1.0) and
                        // once with hue 0°; other components interpolated.
                        let c0 = [1.0, s1 * (1.0 - t) + s2 * t, x1 * (1.0 - t) + x2 * t];
                        let c1 = [0.0, s1 * (1.0 - t) + s2 * t, x1 * (1.0 - t) + x2 * t];
                        let c0 = c0.map(|c| u16::quantize(c, [0.0, 1.0]));
                        let c1 = c1.map(|c| u16::quantize(c, [0.0, 1.0]));

                        // Patch up to the crossing, ...
                        write_patch(
                            &mut vertices,
                            t_x as f32,
                            t_prime,
                            conic.space.convert(c),
                            c0,
                            angle,
                            on_text,
                        );

                        // ... a zero-width patch wrapping the hue, ...
                        write_patch(
                            &mut vertices,
                            t_prime,
                            t_prime,
                            c0,
                            c1,
                            angle,
                            on_text,
                        );

                        // ... and the rest of the sub-segment.
                        write_patch(
                            &mut vertices,
                            t_prime,
                            t_next as f32,
                            c1,
                            conic.space.convert(c_next),
                            angle,
                            on_text,
                        );

                        t_x = t_next;
                        continue;
                    }
                }
            }

            // Regular case: one patch per sub-segment.
            write_patch(
                &mut vertices,
                t_x as f32,
                t_next as f32,
                conic.space.convert(c),
                conic.space.convert(c_next),
                angle,
                on_text,
            );

            t_x = t_next;
        }
    }

    Arc::new(deflate(&vertices))
}
diff --git a/crates/typst-pdf/src/icc/sGrey-v4.icc b/crates/typst-pdf/src/icc/sGrey-v4.icc
new file mode 100644
index 00000000..2187b678
--- /dev/null
+++ b/crates/typst-pdf/src/icc/sGrey-v4.icc
Binary files differ
diff --git a/crates/typst-pdf/src/icc/sRGB-v4.icc b/crates/typst-pdf/src/icc/sRGB-v4.icc
new file mode 100644
index 00000000..d9f3c055
--- /dev/null
+++ b/crates/typst-pdf/src/icc/sRGB-v4.icc
Binary files differ
diff --git a/crates/typst-pdf/src/image.rs b/crates/typst-pdf/src/image.rs
new file mode 100644
index 00000000..8d526efd
--- /dev/null
+++ b/crates/typst-pdf/src/image.rs
@@ -0,0 +1,170 @@
+use std::collections::HashMap;
+use std::io::Cursor;
+use std::sync::Arc;
+
+use image::{DynamicImage, GenericImageView, Rgba};
+use pdf_writer::{Chunk, Filter, Finish, Ref};
+use typst::geom::ColorSpace;
+use typst::image::{ImageKind, RasterFormat, RasterImage, SvgImage};
+
+use crate::{deflate, PdfContext};
+
/// Embed all used images into the PDF.
#[tracing::instrument(skip_all)]
pub fn write_images(ctx: &mut PdfContext) {
    for image in ctx.image_map.items() {
        // Add the primary image.
        match image.kind() {
            ImageKind::Raster(raster) => {
                // TODO: Error if image could not be encoded.
                let (data, filter, has_color) = encode_raster_image(raster);
                let width = image.width();
                let height = image.height();

                let image_ref = ctx.alloc.bump();
                ctx.image_refs.push(image_ref);

                let mut image = ctx.pdf.image_xobject(image_ref, &data);
                image.filter(filter);
                image.width(width as i32);
                image.height(height as i32);
                image.bits_per_component(8);

                // Select the color space: an embedded ICC profile wins;
                // otherwise fall back to sRGB or D65 gray. The profile stream
                // itself is written further below.
                let mut icc_ref = None;
                let space = image.color_space();
                if raster.icc().is_some() {
                    let id = ctx.alloc.bump();
                    space.icc_based(id);
                    icc_ref = Some(id);
                } else if has_color {
                    ctx.colors.write(ColorSpace::Srgb, space, &mut ctx.alloc);
                } else {
                    ctx.colors.write(ColorSpace::D65Gray, space, &mut ctx.alloc);
                }

                // Add a second gray-scale image containing the alpha values if
                // this image has an alpha channel.
                if raster.dynamic().color().has_alpha() {
                    let (alpha_data, alpha_filter) = encode_alpha(raster);
                    let mask_ref = ctx.alloc.bump();
                    image.s_mask(mask_ref);
                    image.finish();

                    let mut mask = ctx.pdf.image_xobject(mask_ref, &alpha_data);
                    mask.filter(alpha_filter);
                    mask.width(width as i32);
                    mask.height(height as i32);
                    mask.color_space().device_gray();
                    mask.bits_per_component(8);
                } else {
                    image.finish();
                }

                // Write the ICC profile stream referenced above, if any.
                if let (Some(icc), Some(icc_ref)) = (raster.icc(), icc_ref) {
                    let compressed = deflate(icc);
                    let mut stream = ctx.pdf.icc_profile(icc_ref, &compressed);
                    stream.filter(Filter::FlateDecode);
                    if has_color {
                        stream.n(3);
                        stream.alternate().srgb();
                    } else {
                        stream.n(1);
                        stream.alternate().d65_gray();
                    }
                }
            }

            ImageKind::Svg(svg) => {
                // SVGs become a self-contained chunk of PDF objects whose
                // internal references must be renumbered into our allocator.
                let chunk = encode_svg(svg);
                let mut map = HashMap::new();
                chunk.renumber_into(&mut ctx.pdf, |old| {
                    *map.entry(old).or_insert_with(|| ctx.alloc.bump())
                });
                // The chunk's main XObject always has ID 1 (see `encode_svg`).
                ctx.image_refs.push(map[&Ref::new(1)]);
            }
        }
    }
}
+
+/// Encode an image with a suitable filter and return the data, filter and
+/// whether the image has color.
+///
+/// Skips the alpha channel as that's encoded separately.
+#[comemo::memoize]
+#[tracing::instrument(skip_all)]
+fn encode_raster_image(image: &RasterImage) -> (Arc<Vec<u8>>, Filter, bool) {
+ let dynamic = image.dynamic();
+ match (image.format(), dynamic) {
+ // 8-bit gray JPEG.
+ (RasterFormat::Jpg, DynamicImage::ImageLuma8(_)) => {
+ let mut data = Cursor::new(vec![]);
+ dynamic.write_to(&mut data, image::ImageFormat::Jpeg).unwrap();
+ (data.into_inner().into(), Filter::DctDecode, false)
+ }
+
+ // 8-bit RGB JPEG (CMYK JPEGs get converted to RGB earlier).
+ (RasterFormat::Jpg, DynamicImage::ImageRgb8(_)) => {
+ let mut data = Cursor::new(vec![]);
+ dynamic.write_to(&mut data, image::ImageFormat::Jpeg).unwrap();
+ (data.into_inner().into(), Filter::DctDecode, true)
+ }
+
+ // TODO: Encode flate streams with PNG-predictor?
+
+ // 8-bit gray PNG.
+ (RasterFormat::Png, DynamicImage::ImageLuma8(luma)) => {
+ let data = deflate(luma.as_raw());
+ (data.into(), Filter::FlateDecode, false)
+ }
+
+ // Anything else (including Rgb(a) PNGs).
+ (_, buf) => {
+ let (width, height) = buf.dimensions();
+ let mut pixels = Vec::with_capacity(3 * width as usize * height as usize);
+ for (_, _, Rgba([r, g, b, _])) in buf.pixels() {
+ pixels.push(r);
+ pixels.push(g);
+ pixels.push(b);
+ }
+
+ let data = deflate(&pixels);
+ (data.into(), Filter::FlateDecode, true)
+ }
+ }
+}
+
+/// Encode an image's alpha channel if present.
+#[comemo::memoize]
+#[tracing::instrument(skip_all)]
+fn encode_alpha(raster: &RasterImage) -> (Arc<Vec<u8>>, Filter) {
+ let pixels: Vec<_> = raster
+ .dynamic()
+ .pixels()
+ .map(|(_, _, Rgba([_, _, _, a]))| a)
+ .collect();
+ (Arc::new(deflate(&pixels)), Filter::FlateDecode)
+}
+
/// Encode an SVG into a chunk of PDF objects.
///
/// The main XObject will have ID 1. The chunk's references are local and are
/// renumbered into the document's allocator by the caller.
#[comemo::memoize]
#[tracing::instrument(skip_all)]
fn encode_svg(svg: &SvgImage) -> Arc<Chunk> {
    let mut chunk = Chunk::new();

    // Safety: We do not keep any references to tree nodes beyond the
    // scope of `with`.
    unsafe {
        svg.with(|tree| {
            svg2pdf::convert_tree_into(
                tree,
                svg2pdf::Options::default(),
                &mut chunk,
                Ref::new(1),
            );
        });
    }

    Arc::new(chunk)
}
diff --git a/crates/typst-pdf/src/lib.rs b/crates/typst-pdf/src/lib.rs
new file mode 100644
index 00000000..92c84495
--- /dev/null
+++ b/crates/typst-pdf/src/lib.rs
@@ -0,0 +1,377 @@
+//! Exporting into PDF documents.
+
+mod color;
+mod extg;
+mod font;
+mod gradient;
+mod image;
+mod outline;
+mod page;
+
+use std::cmp::Eq;
+use std::collections::{BTreeMap, HashMap};
+use std::hash::Hash;
+
+use base64::Engine;
+use ecow::{eco_format, EcoString};
+use pdf_writer::types::Direction;
+use pdf_writer::{Finish, Name, Pdf, Ref, TextStr};
+use typst::doc::{Document, Lang};
+use typst::eval::Datetime;
+use typst::font::Font;
+use typst::geom::{Abs, Dir, Em};
+use typst::image::Image;
+use typst::model::Introspector;
+use xmp_writer::{DateTime, LangId, RenditionClass, Timezone, XmpWriter};
+
+use crate::color::ColorSpaces;
+use crate::extg::ExtGState;
+use crate::gradient::PdfGradient;
+use crate::page::Page;
+
/// Export a document into a PDF file.
///
/// Returns the raw bytes making up the PDF file.
///
/// The `ident` parameter shall be a string that uniquely and stably identifies
/// the document. It should not change between compilations of the same
/// document. Its hash will be used to create a PDF document identifier (the
/// identifier itself is not leaked). If `ident` is `None`, a hash of the
/// document is used instead (which means that it _will_ change across
/// compilations).
///
/// The `timestamp`, if given, is expected to be the creation date of the
/// document as a UTC datetime. It will only be used if `set document(date: ..)`
/// is `auto`.
#[tracing::instrument(skip_all)]
pub fn pdf(
    document: &Document,
    ident: Option<&str>,
    timestamp: Option<Datetime>,
) -> Vec<u8> {
    let mut ctx = PdfContext::new(document);
    // Pages must be constructed first: this populates the font, image,
    // gradient and graphics-state maps that the writers below iterate over.
    page::construct_pages(&mut ctx, &document.pages);
    font::write_fonts(&mut ctx);
    image::write_images(&mut ctx);
    gradient::write_gradients(&mut ctx);
    extg::write_external_graphics_states(&mut ctx);
    // The page tree and catalog reference the resources written above.
    page::write_page_tree(&mut ctx);
    write_catalog(&mut ctx, ident, timestamp);
    ctx.pdf.finish()
}
+
/// Context for exporting a whole PDF document.
///
/// Created once per export and threaded through all writer functions.
pub struct PdfContext<'a> {
    /// The document that we're currently exporting.
    document: &'a Document,
    /// An introspector for the document, used to resolve locations links and
    /// the document outline.
    introspector: Introspector,

    /// The writer we are writing the PDF into.
    pdf: Pdf,
    /// Content of exported pages.
    pages: Vec<Page>,
    /// For each font a mapping from used glyphs to their text representation.
    /// May contain multiple chars in case of ligatures or similar things. The
    /// same glyph can have a different text representation within one document,
    /// then we just save the first one. The resulting strings are used for the
    /// PDF's /ToUnicode map for glyphs that don't have an entry in the font's
    /// cmap. This is important for copy-paste and searching.
    glyph_sets: HashMap<Font, BTreeMap<u16, EcoString>>,
    /// The number of glyphs for all referenced languages in the document.
    /// We keep track of this to determine the main document language.
    languages: HashMap<Lang, usize>,

    /// Allocator for indirect reference IDs.
    alloc: Ref,
    /// The ID of the page tree. Allocated up front so pages can reference
    /// their parent before the tree itself is written.
    page_tree_ref: Ref,
    /// The IDs of written pages.
    page_refs: Vec<Ref>,
    /// The IDs of written fonts.
    font_refs: Vec<Ref>,
    /// The IDs of written images.
    image_refs: Vec<Ref>,
    /// The IDs of written gradients.
    gradient_refs: Vec<Ref>,
    /// The IDs of written external graphics states.
    ext_gs_refs: Vec<Ref>,
    /// Handles color space writing.
    colors: ColorSpaces,

    /// Deduplicates fonts used across the document.
    font_map: Remapper<Font>,
    /// Deduplicates images used across the document.
    image_map: Remapper<Image>,
    /// Deduplicates gradients used across the document.
    gradient_map: Remapper<PdfGradient>,
    /// Deduplicates external graphics states used across the document.
    extg_map: Remapper<ExtGState>,
}
+
+impl<'a> PdfContext<'a> {
+ fn new(document: &'a Document) -> Self {
+ let mut alloc = Ref::new(1);
+ let page_tree_ref = alloc.bump();
+ Self {
+ document,
+ introspector: Introspector::new(&document.pages),
+ pdf: Pdf::new(),
+ pages: vec![],
+ glyph_sets: HashMap::new(),
+ languages: HashMap::new(),
+ alloc,
+ page_tree_ref,
+ page_refs: vec![],
+ font_refs: vec![],
+ image_refs: vec![],
+ gradient_refs: vec![],
+ ext_gs_refs: vec![],
+ colors: ColorSpaces::default(),
+ font_map: Remapper::new(),
+ image_map: Remapper::new(),
+ gradient_map: Remapper::new(),
+ extg_map: Remapper::new(),
+ }
+ }
+}
+
/// Write the document catalog.
///
/// Also writes the document information dictionary, the mirrored XMP metadata
/// stream, the outline tree and the page labels.
#[tracing::instrument(skip_all)]
fn write_catalog(ctx: &mut PdfContext, ident: Option<&str>, timestamp: Option<Datetime>) {
    // The most frequently used language becomes the document language, with
    // ties broken by the language code itself for determinism.
    let lang = ctx
        .languages
        .iter()
        .max_by_key(|(&lang, &count)| (count, lang))
        .map(|(&k, _)| k);

    let dir = if lang.map(Lang::dir) == Some(Dir::RTL) {
        Direction::R2L
    } else {
        Direction::L2R
    };

    // Write the outline tree.
    let outline_root_id = outline::write_outline(ctx);

    // Write the page labels.
    let page_labels = page::write_page_labels(ctx);

    // Write the document information.
    // Each piece of metadata is mirrored into both the classic info
    // dictionary and the XMP metadata stream.
    let mut info = ctx.pdf.document_info(ctx.alloc.bump());
    let mut xmp = XmpWriter::new();
    if let Some(title) = &ctx.document.title {
        info.title(TextStr(title));
        xmp.title([(None, title.as_str())]);
    }

    let authors = &ctx.document.author;
    if !authors.is_empty() {
        info.author(TextStr(&authors.join(", ")));
        xmp.creator(authors.iter().map(|s| s.as_str()));
    }

    let creator = eco_format!("Typst {}", env!("CARGO_PKG_VERSION"));
    info.creator(TextStr(&creator));
    xmp.creator_tool(&creator);

    let keywords = &ctx.document.keywords;
    if !keywords.is_empty() {
        let joined = keywords.join(", ");
        info.keywords(TextStr(&joined));
        xmp.pdf_keywords(&joined);
    }

    // Prefer an explicitly set document date; fall back to the provided
    // timestamp. A UTC timezone is only claimed for the auto date.
    if let Some(date) = ctx.document.date.unwrap_or(timestamp) {
        let tz = ctx.document.date.is_auto();
        if let Some(pdf_date) = pdf_date(date, tz) {
            info.creation_date(pdf_date);
            info.modified_date(pdf_date);
        }
        if let Some(xmp_date) = xmp_date(date, tz) {
            xmp.create_date(xmp_date);
            xmp.modify_date(xmp_date);
        }
    }

    info.finish();
    xmp.num_pages(ctx.document.pages.len() as u32);
    xmp.format("application/pdf");
    xmp.language(ctx.languages.keys().map(|lang| LangId(lang.as_str())));

    // A unique ID for this instance of the document. Changes if anything
    // changes in the frames.
    let instance_id = hash_base64(&ctx.pdf.as_bytes());

    if let Some(ident) = ident {
        // A unique ID for the document that stays stable across compilations.
        let doc_id = hash_base64(&("PDF-1.7", ident));
        xmp.document_id(&doc_id);
        xmp.instance_id(&instance_id);
        ctx.pdf
            .set_file_id((doc_id.clone().into_bytes(), instance_id.into_bytes()));
    } else {
        // This is not spec-compliant, but some PDF readers really want an ID.
        let bytes = instance_id.into_bytes();
        ctx.pdf.set_file_id((bytes.clone(), bytes));
    }

    xmp.rendition_class(RenditionClass::Proof);
    xmp.pdf_version("1.7");

    // Embed the XMP metadata as a stream object referenced from the catalog.
    let xmp_buf = xmp.finish(None);
    let meta_ref = ctx.alloc.bump();
    ctx.pdf
        .stream(meta_ref, xmp_buf.as_bytes())
        .pair(Name(b"Type"), Name(b"Metadata"))
        .pair(Name(b"Subtype"), Name(b"XML"));

    // Write the document catalog.
    let mut catalog = ctx.pdf.catalog(ctx.alloc.bump());
    catalog.pages(ctx.page_tree_ref);
    catalog.viewer_preferences().direction(dir);
    catalog.metadata(meta_ref);

    // Insert the page labels.
    if !page_labels.is_empty() {
        let mut num_tree = catalog.page_labels();
        let mut entries = num_tree.nums();
        for (n, r) in &page_labels {
            // Page label keys are zero-based page indices.
            entries.insert(n.get() as i32 - 1, *r);
        }
    }

    if let Some(outline_root_id) = outline_root_id {
        catalog.outlines(outline_root_id);
    }

    if let Some(lang) = lang {
        catalog.lang(TextStr(lang.as_str()));
    }
}
+
+/// Compress data with the DEFLATE algorithm.
+#[tracing::instrument(skip_all)]
+fn deflate(data: &[u8]) -> Vec<u8> {
+ const COMPRESSION_LEVEL: u8 = 6;
+ miniz_oxide::deflate::compress_to_vec_zlib(data, COMPRESSION_LEVEL)
+}
+
+/// Create a base64-encoded hash of the value.
+fn hash_base64<T: Hash>(value: &T) -> String {
+ base64::engine::general_purpose::STANDARD
+ .encode(typst::util::hash128(value).to_be_bytes())
+}
+
+/// Converts a datetime to a pdf-writer date.
+fn pdf_date(datetime: Datetime, tz: bool) -> Option<pdf_writer::Date> {
+ let year = datetime.year().filter(|&y| y >= 0)? as u16;
+
+ let mut pdf_date = pdf_writer::Date::new(year);
+
+ if let Some(month) = datetime.month() {
+ pdf_date = pdf_date.month(month);
+ }
+
+ if let Some(day) = datetime.day() {
+ pdf_date = pdf_date.day(day);
+ }
+
+ if let Some(h) = datetime.hour() {
+ pdf_date = pdf_date.hour(h);
+ }
+
+ if let Some(m) = datetime.minute() {
+ pdf_date = pdf_date.minute(m);
+ }
+
+ if let Some(s) = datetime.second() {
+ pdf_date = pdf_date.second(s);
+ }
+
+ if tz {
+ pdf_date = pdf_date.utc_offset_hour(0).utc_offset_minute(0);
+ }
+
+ Some(pdf_date)
+}
+
+/// Converts a datetime to an xmp-writer datetime.
+fn xmp_date(datetime: Datetime, tz: bool) -> Option<xmp_writer::DateTime> {
+ let year = datetime.year().filter(|&y| y >= 0)? as u16;
+ Some(DateTime {
+ year,
+ month: datetime.month(),
+ day: datetime.day(),
+ hour: datetime.hour(),
+ minute: datetime.minute(),
+ second: datetime.second(),
+ timezone: if tz { Some(Timezone::Utc) } else { None },
+ })
+}
+
/// Assigns new, consecutive PDF-internal indices to items.
///
/// Deduplicating: inserting the same item twice yields the same index.
struct Remapper<T> {
    /// Forwards from the items to the pdf indices.
    to_pdf: HashMap<T, usize>,
    /// Backwards from the pdf indices to the items (insertion order).
    to_items: Vec<T>,
}
+
+impl<T> Remapper<T>
+where
+ T: Eq + Hash + Clone,
+{
+ fn new() -> Self {
+ Self { to_pdf: HashMap::new(), to_items: vec![] }
+ }
+
+ fn insert(&mut self, item: T) -> usize {
+ let to_layout = &mut self.to_items;
+ *self.to_pdf.entry(item.clone()).or_insert_with(|| {
+ let pdf_index = to_layout.len();
+ to_layout.push(item);
+ pdf_index
+ })
+ }
+
+ fn map(&self, item: &T) -> usize {
+ self.to_pdf[item]
+ }
+
+ fn pdf_indices<'a>(
+ &'a self,
+ refs: &'a [Ref],
+ ) -> impl Iterator<Item = (Ref, usize)> + 'a {
+ refs.iter().copied().zip(0..self.to_pdf.len())
+ }
+
+ fn items(&self) -> impl Iterator<Item = &T> + '_ {
+ self.to_items.iter()
+ }
+}
+
/// Additional methods for [`Abs`].
trait AbsExt {
    /// Convert an absolute length to a number of points as `f32`.
    fn to_f32(self) -> f32;
}

impl AbsExt for Abs {
    fn to_f32(self) -> f32 {
        self.to_pt() as f32
    }
}
+
/// Additional methods for [`Em`].
trait EmExt {
    /// Convert an em length to a number of PDF font units
    /// (1000 units per em, per PDF convention).
    fn to_font_units(self) -> f32;
}

impl EmExt for Em {
    fn to_font_units(self) -> f32 {
        1000.0 * self.get() as f32
    }
}
diff --git a/crates/typst-pdf/src/outline.rs b/crates/typst-pdf/src/outline.rs
new file mode 100644
index 00000000..e51d85c3
--- /dev/null
+++ b/crates/typst-pdf/src/outline.rs
@@ -0,0 +1,191 @@
+use std::num::NonZeroUsize;
+
+use pdf_writer::{Finish, Ref, TextStr};
+use typst::eval::item;
+use typst::geom::{Abs, Smart};
+use typst::model::Content;
+
+use crate::{AbsExt, PdfContext};
+
/// Construct the outline for the document.
///
/// Returns `None` when no heading was bookmarked, in which case no outline
/// dictionary is written at all.
#[tracing::instrument(skip_all)]
pub fn write_outline(ctx: &mut PdfContext) -> Option<Ref> {
    let mut tree: Vec<HeadingNode> = vec![];

    // Stores the level of the topmost skipped ancestor of the next bookmarked
    // heading. A skipped heading is a heading with 'bookmarked: false', that
    // is, it is not added to the PDF outline, and so is not in the tree.
    // Therefore, its next descendant must be added at its level, which is
    // enforced in the manner shown below.
    let mut last_skipped_level = None;
    for heading in ctx.introspector.query(&item!(heading_elem).select()).iter() {
        let leaf = HeadingNode::leaf((**heading).clone());

        if leaf.bookmarked {
            let mut children = &mut tree;

            // Descend the tree through the latest bookmarked heading of each
            // level until either:
            // - you reach a node whose children would be brothers of this
            // heading (=> add the current heading as a child of this node);
            // - you reach a node with no children (=> this heading probably
            // skipped a few nesting levels in Typst, or one or more ancestors
            // of this heading weren't bookmarked, so add it as a child of this
            // node, which is its deepest bookmarked ancestor);
            // - or, if the latest heading(s) was(/were) skipped
            // ('bookmarked: false'), then stop if you reach a node whose
            // children would be brothers of the latest skipped heading
            // of lowest level (=> those skipped headings would be ancestors
            // of the current heading, so add it as a 'brother' of the least
            // deep skipped ancestor among them, as those ancestors weren't
            // added to the bookmark tree, and the current heading should not
            // be mistakenly added as a descendant of a brother of that
            // ancestor.)
            //
            // That is, if you had a bookmarked heading of level N, a skipped
            // heading of level N, a skipped heading of level N + 1, and then
            // a bookmarked heading of level N + 2, that last one is bookmarked
            // as a level N heading (taking the place of its topmost skipped
            // ancestor), so that it is not mistakenly added as a descendant of
            // the previous level N heading.
            //
            // In other words, a heading can be added to the bookmark tree
            // at most as deep as its topmost skipped direct ancestor (if it
            // exists), or at most as deep as its actual nesting level in Typst
            // (not exceeding whichever is the most restrictive depth limit
            // of those two).
            while children.last().map_or(false, |last| {
                last_skipped_level.map_or(true, |l| last.level < l)
                    && last.level < leaf.level
            }) {
                children = &mut children.last_mut().unwrap().children;
            }

            // Since this heading was bookmarked, the next heading, if it is a
            // child of this one, won't have a skipped direct ancestor (indeed,
            // this heading would be its most direct ancestor, and wasn't
            // skipped). Therefore, it can be added as a child of this one, if
            // needed, following the usual rules listed above.
            last_skipped_level = None;
            children.push(leaf);
        } else if last_skipped_level.map_or(true, |l| leaf.level < l) {
            // Only the topmost / lowest-level skipped heading matters when you
            // have consecutive skipped headings (since none of them are being
            // added to the bookmark tree), hence the condition above.
            // This ensures the next bookmarked heading will be placed
            // at most as deep as its topmost skipped ancestors. Deeper
            // ancestors do not matter as the nesting structure they create
            // won't be visible in the PDF outline.
            last_skipped_level = Some(leaf.level);
        }
    }

    if tree.is_empty() {
        return None;
    }

    let root_id = ctx.alloc.bump();
    // After bumping the root id, the allocator holds the reference that the
    // first top-level item will receive in `write_outline_item`.
    let start_ref = ctx.alloc;
    let len = tree.len();

    let mut prev_ref = None;
    for (i, node) in tree.iter().enumerate() {
        prev_ref = Some(write_outline_item(ctx, node, root_id, prev_ref, i + 1 == len));
    }

    // Items were allocated consecutively, so the last allocated reference is
    // the final outline item.
    ctx.pdf
        .outline(root_id)
        .first(start_ref)
        .last(Ref::new(ctx.alloc.get() - 1))
        .count(tree.len() as i32);

    Some(root_id)
}
+
/// A heading in the outline panel.
#[derive(Debug, Clone)]
struct HeadingNode {
    /// The heading element this node was created from.
    element: Content,
    /// The nesting level of the heading.
    level: NonZeroUsize,
    /// Whether the heading should appear in the PDF outline.
    bookmarked: bool,
    /// Bookmarked headings nested below this one.
    children: Vec<HeadingNode>,
}
+
+impl HeadingNode {
+ fn leaf(element: Content) -> Self {
+ HeadingNode {
+ level: element.expect_field_by_name::<NonZeroUsize>("level"),
+ // 'bookmarked' set to 'auto' falls back to the value of 'outlined'.
+ bookmarked: element
+ .expect_field_by_name::<Smart<bool>>("bookmarked")
+ .unwrap_or_else(|| element.expect_field_by_name::<bool>("outlined")),
+ element,
+ children: Vec::new(),
+ }
+ }
+
+ fn len(&self) -> usize {
+ 1 + self.children.iter().map(Self::len).sum::<usize>()
+ }
+}
+
+/// Write an outline item and all its children.
+#[tracing::instrument(skip_all)]
+fn write_outline_item(
+ ctx: &mut PdfContext,
+ node: &HeadingNode,
+ parent_ref: Ref,
+ prev_ref: Option<Ref>,
+ is_last: bool,
+) -> Ref {
+ let id = ctx.alloc.bump();
+ let next_ref = Ref::new(id.get() + node.len() as i32);
+
+ let mut outline = ctx.pdf.outline_item(id);
+ outline.parent(parent_ref);
+
+ if !is_last {
+ outline.next(next_ref);
+ }
+
+ if let Some(prev_rev) = prev_ref {
+ outline.prev(prev_rev);
+ }
+
+ if !node.children.is_empty() {
+ let current_child = Ref::new(id.get() + 1);
+ outline.first(current_child);
+ outline.last(Ref::new(next_ref.get() - 1));
+ outline.count(-(node.children.len() as i32));
+ }
+
+ let body = node.element.expect_field_by_name::<Content>("body");
+ outline.title(TextStr(body.plain_text().trim()));
+
+ let loc = node.element.location().unwrap();
+ let pos = ctx.introspector.position(loc);
+ let index = pos.page.get() - 1;
+ if let Some(page) = ctx.pages.get(index) {
+ let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());
+ outline.dest().page(ctx.page_refs[index]).xyz(
+ pos.point.x.to_f32(),
+ (page.size.y - y).to_f32(),
+ None,
+ );
+ }
+
+ outline.finish();
+
+ let mut prev_ref = None;
+ for (i, child) in node.children.iter().enumerate() {
+ prev_ref = Some(write_outline_item(
+ ctx,
+ child,
+ id,
+ prev_ref,
+ i + 1 == node.children.len(),
+ ));
+ }
+
+ id
+}
diff --git a/crates/typst-pdf/src/page.rs b/crates/typst-pdf/src/page.rs
new file mode 100644
index 00000000..4de472f0
--- /dev/null
+++ b/crates/typst-pdf/src/page.rs
@@ -0,0 +1,759 @@
+use std::num::NonZeroUsize;
+use std::sync::Arc;
+
+use ecow::eco_format;
+use pdf_writer::types::{
+ ActionType, AnnotationType, ColorSpaceOperand, LineCapStyle, LineJoinStyle,
+ NumberingStyle,
+};
+use pdf_writer::writers::PageLabel;
+use pdf_writer::{Content, Filter, Finish, Name, Rect, Ref, Str, TextStr};
+use typst::doc::{
+ Destination, Frame, FrameItem, GroupItem, Meta, PdfPageLabel, PdfPageLabelStyle,
+ TextItem,
+};
+use typst::font::Font;
+use typst::geom::{
+ self, Abs, Em, FixedStroke, Geometry, LineCap, LineJoin, Numeric, Paint, Point,
+ Ratio, Shape, Size, Transform,
+};
+use typst::image::Image;
+
+use crate::color::PaintEncode;
+use crate::extg::ExtGState;
+use crate::{deflate, AbsExt, EmExt, PdfContext};
+
+/// Construct page objects.
+#[tracing::instrument(skip_all)]
+pub fn construct_pages(ctx: &mut PdfContext, frames: &[Frame]) {
+ for frame in frames {
+ construct_page(ctx, frame);
+ }
+}
+
+/// Construct a page object.
+#[tracing::instrument(skip_all)]
+pub fn construct_page(ctx: &mut PdfContext, frame: &Frame) {
+ let page_ref = ctx.alloc.bump();
+ ctx.page_refs.push(page_ref);
+
+ let mut ctx = PageContext {
+ parent: ctx,
+ page_ref,
+ label: None,
+ uses_opacities: false,
+ content: Content::new(),
+ state: State::new(frame.size()),
+ saves: vec![],
+ bottom: 0.0,
+ links: vec![],
+ };
+
+ let size = frame.size();
+
+ // Make the coordinate system start at the top-left.
+ ctx.bottom = size.y.to_f32();
+ ctx.transform(Transform {
+ sx: Ratio::one(),
+ ky: Ratio::zero(),
+ kx: Ratio::zero(),
+ sy: Ratio::new(-1.0),
+ tx: Abs::zero(),
+ ty: size.y,
+ });
+
+ // Encode the page into the content stream.
+ write_frame(&mut ctx, frame);
+
+ let page = Page {
+ size,
+ content: ctx.content.finish(),
+ id: ctx.page_ref,
+ uses_opacities: ctx.uses_opacities,
+ links: ctx.links,
+ label: ctx.label,
+ };
+
+ ctx.parent.pages.push(page);
+}
+
/// Write the page tree.
#[tracing::instrument(skip_all)]
pub fn write_page_tree(ctx: &mut PdfContext) {
    for i in 0..ctx.pages.len() {
        write_page(ctx, i);
    }

    let mut pages = ctx.pdf.pages(ctx.page_tree_ref);
    pages
        .count(ctx.page_refs.len() as i32)
        .kids(ctx.page_refs.iter().copied());

    // All pages share a single resource dictionary on the page tree root.
    let mut resources = pages.resources();
    ctx.colors
        .write_color_spaces(resources.color_spaces(), &mut ctx.alloc);

    // Map the `F{n}` names used in content streams to the font objects.
    let mut fonts = resources.fonts();
    for (font_ref, f) in ctx.font_map.pdf_indices(&ctx.font_refs) {
        let name = eco_format!("F{}", f);
        fonts.pair(Name(name.as_bytes()), font_ref);
    }

    fonts.finish();

    // Map the `Im{n}` names to the image XObjects.
    let mut images = resources.x_objects();
    for (image_ref, im) in ctx.image_map.pdf_indices(&ctx.image_refs) {
        let name = eco_format!("Im{}", im);
        images.pair(Name(name.as_bytes()), image_ref);
    }

    images.finish();

    // Map the `Gr{n}` names to the gradient patterns.
    let mut patterns = resources.patterns();
    for (gradient_ref, gr) in ctx.gradient_map.pdf_indices(&ctx.gradient_refs) {
        let name = eco_format!("Gr{}", gr);
        patterns.pair(Name(name.as_bytes()), gradient_ref);
    }

    patterns.finish();

    // Map the `Gs{n}` names to the external graphics states.
    let mut ext_gs_states = resources.ext_g_states();
    for (gs_ref, gs) in ctx.extg_map.pdf_indices(&ctx.ext_gs_refs) {
        let name = eco_format!("Gs{}", gs);
        ext_gs_states.pair(Name(name.as_bytes()), gs_ref);
    }
    ext_gs_states.finish();

    // The writers borrow the underlying PDF buffer, so they must be finished
    // in reverse order of creation before anything else is written.
    resources.finish();
    pages.finish();

    // Write all of the functions used by the document.
    ctx.colors.write_functions(&mut ctx.pdf);
}
+
/// Write a page tree node.
#[tracing::instrument(skip_all)]
fn write_page(ctx: &mut PdfContext, i: usize) {
    let page = &ctx.pages[i];
    let content_id = ctx.alloc.bump();

    let mut page_writer = ctx.pdf.page(page.id);
    page_writer.parent(ctx.page_tree_ref);

    let w = page.size.x.to_f32();
    let h = page.size.y.to_f32();
    page_writer.media_box(Rect::new(0.0, 0.0, w, h));
    page_writer.contents(content_id);

    // Pages that use opacities need an explicit transparency group so that
    // the blending happens in sRGB.
    if page.uses_opacities {
        page_writer
            .group()
            .transparency()
            .isolated(false)
            .knockout(false)
            .color_space()
            .srgb();
    }

    // Write the collected links as link annotations.
    let mut annotations = page_writer.annotations();
    for (dest, rect) in &page.links {
        let mut annotation = annotations.push();
        annotation.subtype(AnnotationType::Link).rect(*rect);
        // A zero-width border suppresses the default link rectangle.
        annotation.border(0.0, 0.0, 0.0, None);

        let pos = match dest {
            // URL links are URI actions rather than in-document destinations.
            Destination::Url(uri) => {
                annotation
                    .action()
                    .action_type(ActionType::Uri)
                    .uri(Str(uri.as_bytes()));
                continue;
            }
            Destination::Position(pos) => *pos,
            Destination::Location(loc) => ctx.introspector.position(*loc),
        };

        let index = pos.page.get() - 1;
        // Jump slightly above the target so it isn't flush with the top edge.
        let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());
        if let Some(page) = ctx.pages.get(index) {
            annotation
                .action()
                .action_type(ActionType::GoTo)
                .destination()
                .page(ctx.page_refs[index])
                .xyz(pos.point.x.to_f32(), (page.size.y - y).to_f32(), None);
        }
    }

    annotations.finish();
    page_writer.finish();

    // Compress the content stream before embedding it.
    let data = deflate_content(&page.content);
    ctx.pdf.stream(content_id, &data).filter(Filter::FlateDecode);
}
+
/// Write the page labels.
///
/// Returns pairs of (1-indexed page number, label dictionary reference),
/// presumably consumed by the catalog's /PageLabels tree — the consumer
/// lives outside this file.
#[tracing::instrument(skip_all)]
pub fn write_page_labels(ctx: &mut PdfContext) -> Vec<(NonZeroUsize, Ref)> {
    let mut result = vec![];
    let mut prev: Option<&PdfPageLabel> = None;

    for (i, page) in ctx.pages.iter().enumerate() {
        let nr = NonZeroUsize::new(1 + i).unwrap();
        let Some(label) = &page.label else { continue };

        // Don't create a label if neither style nor prefix are specified.
        if label.prefix.is_none() && label.style.is_none() {
            continue;
        }

        // A label that merely continues the previous run (same prefix and
        // style, offset incremented by one) needs no new entry, since a PDF
        // page label range extends until the next entry begins.
        if let Some(pre) = prev {
            if label.prefix == pre.prefix
                && label.style == pre.style
                && label.offset == pre.offset.map(|n| n.saturating_add(1))
            {
                prev = Some(label);
                continue;
            }
        }

        let id = ctx.alloc.bump();
        let mut entry = ctx.pdf.indirect(id).start::<PageLabel>();

        // Only add what is actually provided. Don't add empty prefix string if
        // it wasn't given for example.
        if let Some(prefix) = &label.prefix {
            entry.prefix(TextStr(prefix));
        }

        if let Some(style) = label.style {
            entry.style(to_pdf_numbering_style(style));
        }

        if let Some(offset) = label.offset {
            entry.offset(offset.get() as i32);
        }

        result.push((nr, id));
        prev = Some(label);
    }

    result
}
+
/// Memoized version of [`deflate`] specialized for a page's content stream.
#[comemo::memoize]
fn deflate_content(content: &[u8]) -> Arc<Vec<u8>> {
    // The `Arc` makes the memoized result cheap to clone on cache hits.
    Arc::new(deflate(content))
}
+
/// Data for an exported page.
pub struct Page {
    /// The indirect object id of the page.
    pub id: Ref,
    /// The page's dimensions.
    pub size: Size,
    /// The page's raw content stream (compressed only when written out in
    /// `write_page`).
    pub content: Vec<u8>,
    /// Whether the page uses opacities (requires a transparency group).
    pub uses_opacities: bool,
    /// Links with their rectangles already in the PDF coordinate system.
    pub links: Vec<(Destination, Rect)>,
    /// The page's PDF label.
    pub label: Option<PdfPageLabel>,
}
+
/// An exporter for the contents of a single PDF page.
pub struct PageContext<'a, 'b> {
    /// The document-wide export context.
    pub parent: &'a mut PdfContext<'b>,
    /// The indirect reference allocated for this page.
    page_ref: Ref,
    /// The page's PDF label, set when encountered in the frame's metadata.
    label: Option<PdfPageLabel>,
    /// The content stream being built for this page.
    pub content: Content,
    /// The current simulated graphics state.
    state: State,
    /// Graphics states saved via `save_state` and not yet restored.
    saves: Vec<State>,
    /// The page height in PDF points. NOTE(review): set in `construct_page`;
    /// no other use is visible in this file.
    bottom: f32,
    /// Whether the page uses non-default opacities.
    uses_opacities: bool,
    /// Collected links, later written as annotations in `write_page`.
    links: Vec<(Destination, Rect)>,
}
+
/// A simulated graphics state used to deduplicate graphics state changes and
/// keep track of the current transformation matrix for link annotations.
#[derive(Debug, Clone)]
struct State {
    /// The transform of the current item.
    transform: Transform,
    /// The transform of first hard frame in the hierarchy.
    container_transform: Transform,
    /// The size of the first hard frame in the hierarchy.
    size: Size,
    /// The currently active font and font size, if any.
    font: Option<(Font, Abs)>,
    /// The currently active fill paint, if any.
    fill: Option<Paint>,
    /// The named color space currently set for filling, if any.
    fill_space: Option<Name<'static>>,
    /// The currently active external graphics state, if any.
    external_graphics_state: Option<ExtGState>,
    /// The currently active stroke style, if any.
    stroke: Option<FixedStroke>,
    /// The named color space currently set for stroking, if any.
    stroke_space: Option<Name<'static>>,
}
+
+impl State {
+ /// Creates a new, clean state for a given page `size`.
+ pub fn new(size: Size) -> Self {
+ Self {
+ transform: Transform::identity(),
+ container_transform: Transform::identity(),
+ size,
+ font: None,
+ fill: None,
+ fill_space: None,
+ external_graphics_state: None,
+ stroke: None,
+ stroke_space: None,
+ }
+ }
+
+ /// Creates the [`Transforms`] structure for the current item.
+ pub fn transforms(&self, size: Size, pos: Point) -> Transforms {
+ Transforms {
+ transform: self.transform.pre_concat(Transform::translate(pos.x, pos.y)),
+ container_transform: self.container_transform,
+ container_size: self.size,
+ size,
+ }
+ }
+}
+
/// Subset of the state used to calculate the transform of gradients and
/// patterns. Captured at the moment an item is drawn, via
/// [`State::transforms`].
#[derive(Clone, Copy)]
pub(super) struct Transforms {
    /// The transform of the current item.
    pub transform: Transform,
    /// The transform of first hard frame in the hierarchy.
    pub container_transform: Transform,
    /// The size of the first hard frame in the hierarchy.
    pub container_size: Size,
    /// The size of the item.
    pub size: Size,
}
+
impl PageContext<'_, '_> {
    /// Pushes the graphics state onto both the simulated and the real stack.
    fn save_state(&mut self) {
        self.saves.push(self.state.clone());
        self.content.save_state();
    }

    /// Restores the most recently saved graphics state.
    fn restore_state(&mut self) {
        self.content.restore_state();
        self.state = self.saves.pop().expect("missing state save");
    }

    /// Activates an external graphics state, skipping the write if it is
    /// already active.
    fn set_external_graphics_state(&mut self, graphics_state: &ExtGState) {
        let current_state = self.state.external_graphics_state.as_ref();
        if current_state != Some(graphics_state) {
            self.parent.extg_map.insert(*graphics_state);
            let name = eco_format!("Gs{}", self.parent.extg_map.map(graphics_state));
            self.content.set_parameters(Name(name.as_bytes()));

            if graphics_state.uses_opacities() {
                self.uses_opacities = true;
            }
        }
    }

    /// Sets the stroke and fill opacities from the given paints.
    ///
    /// Gradient paints and absent paints both map to full opacity (255).
    fn set_opacities(&mut self, stroke: Option<&FixedStroke>, fill: Option<&Paint>) {
        let stroke_opacity = stroke
            .map(|stroke| {
                let color = match &stroke.paint {
                    Paint::Solid(color) => *color,
                    Paint::Gradient(_) => return 255,
                };

                color.alpha().map_or(255, |v| (v * 255.0).round() as u8)
            })
            .unwrap_or(255);
        let fill_opacity = fill
            .map(|paint| {
                let color = match paint {
                    Paint::Solid(color) => *color,
                    Paint::Gradient(_) => return 255,
                };

                color.alpha().map_or(255, |v| (v * 255.0).round() as u8)
            })
            .unwrap_or(255);
        self.set_external_graphics_state(&ExtGState { stroke_opacity, fill_opacity });
    }

    /// Concatenates `transform` onto the content stream and the simulated
    /// current transformation matrix.
    fn transform(&mut self, transform: Transform) {
        let Transform { sx, ky, kx, sy, tx, ty } = transform;
        self.state.transform = self.state.transform.pre_concat(transform);
        self.content.transform([
            sx.get() as _,
            ky.get() as _,
            kx.get() as _,
            sy.get() as _,
            tx.to_f32(),
            ty.to_f32(),
        ]);
    }

    /// Records `transform` as part of the container transform without
    /// emitting anything into the content stream.
    fn group_transform(&mut self, transform: Transform) {
        self.state.container_transform =
            self.state.container_transform.pre_concat(transform);
    }

    /// Sets the active font, deduplicating repeated assignments.
    fn set_font(&mut self, font: &Font, size: Abs) {
        if self.state.font.as_ref().map(|(f, s)| (f, *s)) != Some((font, size)) {
            self.parent.font_map.insert(font.clone());
            let name = eco_format!("F{}", self.parent.font_map.map(font));
            self.content.set_font(Name(name.as_bytes()), size.to_f32());
            self.state.font = Some((font.clone(), size));
        }
    }

    /// Records the size of the current container frame.
    fn size(&mut self, size: Size) {
        self.state.size = size;
    }

    /// Sets the fill paint.
    ///
    /// Gradients are always re-emitted even when unchanged, since their
    /// encoding depends on the current transforms.
    fn set_fill(&mut self, fill: &Paint, on_text: bool, transforms: Transforms) {
        if self.state.fill.as_ref() != Some(fill)
            || matches!(self.state.fill, Some(Paint::Gradient(_)))
        {
            fill.set_as_fill(self, on_text, transforms);
            self.state.fill = Some(fill.clone());
        }
    }

    /// Sets the named fill color space unless it is already active.
    pub fn set_fill_color_space(&mut self, space: Name<'static>) {
        if self.state.fill_space != Some(space) {
            self.content.set_fill_color_space(ColorSpaceOperand::Named(space));
            self.state.fill_space = Some(space);
        }
    }

    /// Forgets the active fill color space so the next use re-emits it.
    pub fn reset_fill_color_space(&mut self) {
        self.state.fill_space = None;
    }

    /// Sets the stroke style, only emitting the properties that changed.
    ///
    /// Like fills, gradient strokes are always re-emitted.
    fn set_stroke(&mut self, stroke: &FixedStroke, transforms: Transforms) {
        if self.state.stroke.as_ref() != Some(stroke)
            || matches!(
                self.state.stroke.as_ref().map(|s| &s.paint),
                Some(Paint::Gradient(_))
            )
        {
            let FixedStroke {
                paint,
                thickness,
                line_cap,
                line_join,
                dash_pattern,
                miter_limit,
            } = stroke;

            paint.set_as_stroke(self, false, transforms);

            self.content.set_line_width(thickness.to_f32());
            if self.state.stroke.as_ref().map(|s| &s.line_cap) != Some(line_cap) {
                self.content.set_line_cap(to_pdf_line_cap(*line_cap));
            }
            if self.state.stroke.as_ref().map(|s| &s.line_join) != Some(line_join) {
                self.content.set_line_join(to_pdf_line_join(*line_join));
            }
            if self.state.stroke.as_ref().map(|s| &s.dash_pattern) != Some(dash_pattern) {
                if let Some(pattern) = dash_pattern {
                    self.content.set_dash_pattern(
                        pattern.array.iter().map(|l| l.to_f32()),
                        pattern.phase.to_f32(),
                    );
                } else {
                    // No dash pattern means a solid line.
                    self.content.set_dash_pattern([], 0.0);
                }
            }
            if self.state.stroke.as_ref().map(|s| &s.miter_limit) != Some(miter_limit) {
                self.content.set_miter_limit(miter_limit.get() as f32);
            }
            self.state.stroke = Some(stroke.clone());
        }
    }

    /// Sets the named stroke color space unless it is already active.
    pub fn set_stroke_color_space(&mut self, space: Name<'static>) {
        if self.state.stroke_space != Some(space) {
            self.content.set_stroke_color_space(ColorSpaceOperand::Named(space));
            self.state.stroke_space = Some(space);
        }
    }

    /// Forgets the active stroke color space so the next use re-emits it.
    pub fn reset_stroke_color_space(&mut self) {
        self.state.stroke_space = None;
    }
}
+
+/// Encode a frame into the content stream.
+fn write_frame(ctx: &mut PageContext, frame: &Frame) {
+ for &(pos, ref item) in frame.items() {
+ let x = pos.x.to_f32();
+ let y = pos.y.to_f32();
+
+ match item {
+ FrameItem::Group(group) => write_group(ctx, pos, group),
+ FrameItem::Text(text) => write_text(ctx, pos, text),
+ FrameItem::Shape(shape, _) => write_shape(ctx, pos, shape),
+ FrameItem::Image(image, size, _) => write_image(ctx, x, y, image, *size),
+ FrameItem::Meta(meta, size) => match meta {
+ Meta::Link(dest) => write_link(ctx, pos, dest, *size),
+ Meta::Elem(_) => {}
+ Meta::Hide => {}
+ Meta::PageNumbering(_) => {}
+ Meta::PdfPageLabel(label) => ctx.label = Some(label.clone()),
+ },
+ }
+ }
+}
+
/// Encode a group into the content stream.
fn write_group(ctx: &mut PageContext, pos: Point, group: &GroupItem) {
    let translation = Transform::translate(pos.x, pos.y);

    ctx.save_state();

    if group.frame.kind().is_hard() {
        // A hard frame starts a new container: record its transform relative
        // to the previous container by undoing the old container transform
        // and then applying the accumulated item transform plus this group's
        // own transform.
        ctx.group_transform(
            translation
                .pre_concat(
                    ctx.state
                        .transform
                        .post_concat(ctx.state.container_transform.invert().unwrap()),
                )
                .pre_concat(group.transform),
        );
        ctx.size(group.frame.size());
    }

    ctx.transform(translation.pre_concat(group.transform));
    if let Some(clip_path) = &group.clip_path {
        // Emit the clip path and activate it without painting it.
        write_path(ctx, 0.0, 0.0, clip_path);
        ctx.content.clip_nonzero();
        ctx.content.end_path();
    }

    write_frame(ctx, &group.frame);
    ctx.restore_state();
}
+
/// Encode a text run into the content stream.
fn write_text(ctx: &mut PageContext, pos: Point, text: &TextItem) {
    let x = pos.x.to_f32();
    let y = pos.y.to_f32();

    // Tally glyph counts per language. NOTE(review): presumably used to pick
    // the document's main language; the consumer is outside this file.
    *ctx.parent.languages.entry(text.lang).or_insert(0) += text.glyphs.len();

    // Remember which source text each glyph stems from. NOTE(review): likely
    // feeds the font's /ToUnicode map written elsewhere — confirm there.
    let glyph_set = ctx.parent.glyph_sets.entry(text.font.clone()).or_default();
    for g in &text.glyphs {
        let segment = &text.text[g.range()];
        glyph_set.entry(g.id).or_insert_with(|| segment.into());
    }

    ctx.set_fill(&text.fill, true, ctx.state.transforms(Size::zero(), pos));
    ctx.set_font(&text.font, text.size);
    ctx.set_opacities(None, Some(&text.fill));
    ctx.content.begin_text();

    // Position the text. The -1.0 y scale counters the page-level flip so
    // that glyphs aren't drawn upside down.
    ctx.content.set_text_matrix([1.0, 0.0, 0.0, -1.0, x, y]);

    let mut positioned = ctx.content.show_positioned();
    let mut items = positioned.items();
    let mut adjustment = Em::zero();
    let mut encoded = vec![];

    // Write the glyphs with kerning adjustments.
    for glyph in &text.glyphs {
        adjustment += glyph.x_offset;

        // Flush the pending glyph run before emitting a kerning adjustment.
        if !adjustment.is_zero() {
            if !encoded.is_empty() {
                items.show(Str(&encoded));
                encoded.clear();
            }

            items.adjust(-adjustment.to_font_units());
            adjustment = Em::zero();
        }

        // Glyphs are encoded as big-endian two-byte CIDs.
        let cid = super::font::glyph_cid(&text.font, glyph.id);
        encoded.push((cid >> 8) as u8);
        encoded.push((cid & 0xff) as u8);

        // Accumulate the difference between the layouted advance and the
        // font's natural advance as a pending adjustment.
        if let Some(advance) = text.font.advance(glyph.id) {
            adjustment += glyph.x_advance - advance;
        }

        adjustment -= glyph.x_offset;
    }

    if !encoded.is_empty() {
        items.show(Str(&encoded));
    }

    items.finish();
    positioned.finish();
    ctx.content.end_text();
}
+
+/// Encode a geometrical shape into the content stream.
+fn write_shape(ctx: &mut PageContext, pos: Point, shape: &Shape) {
+ let x = pos.x.to_f32();
+ let y = pos.y.to_f32();
+
+ let stroke = shape.stroke.as_ref().and_then(|stroke| {
+ if stroke.thickness.to_f32() > 0.0 {
+ Some(stroke)
+ } else {
+ None
+ }
+ });
+
+ if shape.fill.is_none() && stroke.is_none() {
+ return;
+ }
+
+ if let Some(fill) = &shape.fill {
+ ctx.set_fill(fill, false, ctx.state.transforms(shape.geometry.bbox_size(), pos));
+ }
+
+ if let Some(stroke) = stroke {
+ ctx.set_stroke(stroke, ctx.state.transforms(shape.geometry.bbox_size(), pos));
+ }
+
+ ctx.set_opacities(stroke, shape.fill.as_ref());
+
+ match shape.geometry {
+ Geometry::Line(target) => {
+ let dx = target.x.to_f32();
+ let dy = target.y.to_f32();
+ ctx.content.move_to(x, y);
+ ctx.content.line_to(x + dx, y + dy);
+ }
+ Geometry::Rect(size) => {
+ let w = size.x.to_f32();
+ let h = size.y.to_f32();
+ if w > 0.0 && h > 0.0 {
+ ctx.content.rect(x, y, w, h);
+ }
+ }
+ Geometry::Path(ref path) => {
+ write_path(ctx, x, y, path);
+ }
+ }
+
+ match (&shape.fill, stroke) {
+ (None, None) => unreachable!(),
+ (Some(_), None) => ctx.content.fill_nonzero(),
+ (None, Some(_)) => ctx.content.stroke(),
+ (Some(_), Some(_)) => ctx.content.fill_nonzero_and_stroke(),
+ };
+}
+
+/// Encode a bezier path into the content stream.
+fn write_path(ctx: &mut PageContext, x: f32, y: f32, path: &geom::Path) {
+ for elem in &path.0 {
+ match elem {
+ geom::PathItem::MoveTo(p) => {
+ ctx.content.move_to(x + p.x.to_f32(), y + p.y.to_f32())
+ }
+ geom::PathItem::LineTo(p) => {
+ ctx.content.line_to(x + p.x.to_f32(), y + p.y.to_f32())
+ }
+ geom::PathItem::CubicTo(p1, p2, p3) => ctx.content.cubic_to(
+ x + p1.x.to_f32(),
+ y + p1.y.to_f32(),
+ x + p2.x.to_f32(),
+ y + p2.y.to_f32(),
+ x + p3.x.to_f32(),
+ y + p3.y.to_f32(),
+ ),
+ geom::PathItem::ClosePath => ctx.content.close_path(),
+ };
+ }
+}
+
+/// Encode a vector or raster image into the content stream.
+fn write_image(ctx: &mut PageContext, x: f32, y: f32, image: &Image, size: Size) {
+ ctx.parent.image_map.insert(image.clone());
+ let name = eco_format!("Im{}", ctx.parent.image_map.map(image));
+ let w = size.x.to_f32();
+ let h = size.y.to_f32();
+ ctx.content.save_state();
+ ctx.content.transform([w, 0.0, 0.0, -h, x, y + h]);
+
+ if let Some(alt) = image.alt() {
+ let mut image_span =
+ ctx.content.begin_marked_content_with_properties(Name(b"Span"));
+ let mut image_alt = image_span.properties();
+ image_alt.pair(Name(b"Alt"), pdf_writer::Str(alt.as_bytes()));
+ image_alt.finish();
+ image_span.finish();
+
+ ctx.content.x_object(Name(name.as_bytes()));
+ ctx.content.end_marked_content();
+ } else {
+ ctx.content.x_object(Name(name.as_bytes()));
+ }
+
+ ctx.content.restore_state();
+}
+
+/// Save a link for later writing in the annotations dictionary.
+fn write_link(ctx: &mut PageContext, pos: Point, dest: &Destination, size: Size) {
+ let mut min_x = Abs::inf();
+ let mut min_y = Abs::inf();
+ let mut max_x = -Abs::inf();
+ let mut max_y = -Abs::inf();
+
+ // Compute the bounding box of the transformed link.
+ for point in [
+ pos,
+ pos + Point::with_x(size.x),
+ pos + Point::with_y(size.y),
+ pos + size.to_point(),
+ ] {
+ let t = point.transform(ctx.state.transform);
+ min_x.set_min(t.x);
+ min_y.set_min(t.y);
+ max_x.set_max(t.x);
+ max_y.set_max(t.y);
+ }
+
+ let x1 = min_x.to_f32();
+ let x2 = max_x.to_f32();
+ let y1 = max_y.to_f32();
+ let y2 = min_y.to_f32();
+ let rect = Rect::new(x1, y1, x2, y2);
+
+ ctx.links.push((dest.clone(), rect));
+}
+
/// Convert a Typst line cap to its PDF equivalent.
fn to_pdf_line_cap(cap: LineCap) -> LineCapStyle {
    match cap {
        LineCap::Butt => LineCapStyle::ButtCap,
        LineCap::Round => LineCapStyle::RoundCap,
        LineCap::Square => LineCapStyle::ProjectingSquareCap,
    }
}

/// Convert a Typst line join to its PDF equivalent.
fn to_pdf_line_join(join: LineJoin) -> LineJoinStyle {
    match join {
        LineJoin::Miter => LineJoinStyle::MiterJoin,
        LineJoin::Round => LineJoinStyle::RoundJoin,
        LineJoin::Bevel => LineJoinStyle::BevelJoin,
    }
}

/// Convert a page label numbering style to its PDF equivalent.
fn to_pdf_numbering_style(style: PdfPageLabelStyle) -> NumberingStyle {
    match style {
        PdfPageLabelStyle::Arabic => NumberingStyle::Arabic,
        PdfPageLabelStyle::LowerRoman => NumberingStyle::LowerRoman,
        PdfPageLabelStyle::UpperRoman => NumberingStyle::UpperRoman,
        PdfPageLabelStyle::LowerAlpha => NumberingStyle::LowerAlpha,
        PdfPageLabelStyle::UpperAlpha => NumberingStyle::UpperAlpha,
    }
}
diff --git a/crates/typst-pdf/src/postscript/hsl.ps b/crates/typst-pdf/src/postscript/hsl.ps
new file mode 100644
index 00000000..740bc3ed
--- /dev/null
+++ b/crates/typst-pdf/src/postscript/hsl.ps
@@ -0,0 +1,63 @@
+
{
  % Starting stack: H, S, L
  % /!\ WARNING: The hue component **MUST** be encoded
  % in the range [0, 1] before calling this function.
  % This is because the function assumes that the
  % hue component is divided by a factor of 360
  % in order to meet the range requirements of the
  % PDF specification.

  % First we do H = (H * 360.0) % 360
  3 2 roll 360 mul 3 1 roll

  % Compute C = (1 - |2 * L - 1|) * S
  dup 1 exch 2 mul 1 sub abs sub 3 2 roll mul

  % P = (H / 60) % 2
  3 2 roll dup 60 div 2
  2 copy div cvi mul exch sub abs

  % X = C * (1 - |P - 1|)
  1 exch 1 sub abs sub 3 2 roll dup 3 1 roll mul

  % Compute m = L - C / 2
  exch dup 2 div 5 4 roll exch sub

  % Rotate so H is top
  4 3 roll exch 4 1 roll

  % Construct the RGB stack by the 60-degree sector of the hue
  dup 60 lt {
    % We need to build: (C, X, 0)
    pop 0 3 1 roll
  } {
    dup 120 lt {
      % We need to build: (X, C, 0)
      pop exch 0 3 1 roll
    } {
      dup 180 lt {
        % We need to build: (0, C, X)
        pop 0
      } {
        dup 240 lt {
          % We need to build: (0, X, C)
          pop exch 0
        } {
          300 lt {
            % We need to build: (X, 0, C)
            0 3 2 roll
          } {
            % We need to build: (C, 0, X)
            0 exch
          } ifelse
        } ifelse
      } ifelse
    } ifelse
  } ifelse

  4 3 roll

  % Add m to each component
  % Final stack: R, G, B
  dup dup 6 2 roll add 5 2 roll add exch 4 3 roll add exch
}
diff --git a/crates/typst-pdf/src/postscript/hsv.ps b/crates/typst-pdf/src/postscript/hsv.ps
new file mode 100644
index 00000000..b29adf11
--- /dev/null
+++ b/crates/typst-pdf/src/postscript/hsv.ps
@@ -0,0 +1,62 @@
{
  % Starting stack: H, S, V
  % /!\ WARNING: The hue component **MUST** be encoded
  % in the range [0, 1] before calling this function.
  % This is because the function assumes that the
  % hue component is divided by a factor of 360
  % in order to meet the range requirements of the
  % PDF specification.

  % First we do H = (H * 360.0) % 360
  3 2 roll 360 mul 3 1 roll

  % Compute C = V * S
  dup 3 1 roll mul

  % P = (H / 60) % 2
  3 2 roll dup 60 div 2
  2 copy div cvi mul exch sub abs

  % X = C * (1 - |P - 1|)
  1 exch 1 sub abs sub 3 2 roll dup 3 1 roll mul

  % Compute m = V - C
  exch dup 5 4 roll exch sub

  % Rotate so H is top
  4 3 roll exch 4 1 roll

  % Construct the RGB stack by the 60-degree sector of the hue
  dup 60 lt {
    % We need to build: (C, X, 0)
    pop 0 3 1 roll
  } {
    dup 120 lt {
      % We need to build: (X, C, 0)
      pop exch 0 3 1 roll
    } {
      dup 180 lt {
        % We need to build: (0, C, X)
        pop 0
      } {
        dup 240 lt {
          % We need to build: (0, X, C)
          pop exch 0
        } {
          300 lt {
            % We need to build: (X, 0, C)
            0 3 2 roll
          } {
            % We need to build: (C, 0, X)
            0 exch
          } ifelse
        } ifelse
      } ifelse
    } ifelse
  } ifelse

  4 3 roll

  % Add m to each component
  % Final stack: R, G, B
  dup dup 6 2 roll add 5 2 roll add exch 4 3 roll add exch
}
diff --git a/crates/typst-pdf/src/postscript/oklab.ps b/crates/typst-pdf/src/postscript/oklab.ps
new file mode 100644
index 00000000..4d6e9ad5
--- /dev/null
+++ b/crates/typst-pdf/src/postscript/oklab.ps
@@ -0,0 +1,78 @@
{
  % Starting stack: L, A, B
  % /!\ WARNING: The A and B components **MUST** be encoded
  % in the range [0, 1] before calling this function.
  % This is because the function assumes that the
  % A and B components are offset by a factor of 0.4
  % in order to meet the range requirements of the
  % PDF specification.

  % Undo the 0.4 encoding offset on A and B
  exch 0.4 sub
  exch 0.4 sub

  % Load L a and b into the stack
  2 index
  2 index
  2 index

  % Compute f1 = ((0.3963377774 * a) + (0.2158037573 * b) + L)^3
  0.2158037573 mul exch
  0.3963377774 mul add add
  dup dup mul mul

  % Load L, a, and b into the stack
  3 index
  3 index
  3 index

  % Compute f2 = ((-0.1055613458 * a) + (-0.0638541728 * b) + L)^3
  -0.0638541728 mul exch
  -0.1055613458 mul add add
  dup dup mul mul

  % Load L, a, and b into the stack
  4 index
  4 index
  4 index

  % Compute f3 = ((-0.0894841775 * a) + (-1.2914855480 * b) + L)^3
  -1.2914855480 mul exch
  -0.0894841775 mul add add
  dup dup mul mul

  % Discard L, a, and b by rolling the stack and popping
  6 3 roll pop pop pop

  % Load f1, f2, and f3 into the stack
  2 index
  2 index
  2 index

  % Compute R = f1 * 4.0767416621 + f2 * -3.3077115913 + f3 * 0.2309699292
  0.2309699292 mul exch
  -3.3077115913 mul add exch
  4.0767416621 mul add

  % Load f1, f2, and f3 into the stack
  3 index
  3 index
  3 index

  % Compute G = f1 * -1.2684380046 + f2 * 2.6097574011 + f3 * -0.3413193965
  -0.3413193965 mul exch
  2.6097574011 mul add exch
  -1.2684380046 mul add

  % Load f1, f2, and f3 into the stack
  4 index
  4 index
  4 index

  % Compute B = f1 * -0.0041960863 + f2 * -0.7034186147 + f3 * 1.7076147010
  1.7076147010 mul exch
  -0.7034186147 mul add exch
  -0.0041960863 mul add

  % Discard f1, f2, and f3 by rolling the stack and popping
  % Final stack: R, G, B
  6 3 roll pop pop pop
}