Squidex supports uploading image assets, then setting a focal point. Then when you download those assets with a width/height it will downscale those images automatically (you can even supply a quality parameter).

By default, though, those images are served from the https://cloud.squidex.io/ domain, and that just will not work for me. I like to control all the things.

I really wanted to be able to experiment during dev but during export I wanted to download and include these assets with my static site. I whipped up some code to do just that.

Before I just show you the code, let's walk through what it is doing.

  1. Process markdown, adding syntax highlighting using prismjs
  2. If the current NPM command is a sapper export, let's parse the resulting HTML using cheerio
  3. Let's pull out any images; if they are sourced from Squidex, let's download those images, making sure to include the width and height if those attributes are present.
  4. Let's save the file to the sapper relative export path and then rewrite the URL to the local path

import prism from "prismjs";
import marked from "marked";
import fs from "fs";
import path from "path";
import request from "request";
import cheerio from 'cheerio';
require('prismjs/components/prism-jsx.min');

// Custom marked renderer that runs fenced code blocks through Prism for
// syntax highlighting.
const renderer = new marked.Renderer();
renderer.code = function (code, language) {
  // Fall back to the HTML grammar when Prism has no parser for the language.
  const grammar = prism.languages[language] || prism.languages.html;
  const highlighted = prism.highlight(code, grammar, language);
  return `<pre class="language-${language}">
    <code class="language-${language}">${highlighted}</code>
    </pre>`;
};

/**
 * Renders a markdown post to HTML. During a `sapper export` build, any
 * Squidex-hosted images are downloaded into the export directory and their
 * `src` attributes rewritten to local paths.
 *
 * @param {string} text - Raw markdown source.
 * @returns {string} The rendered HTML (with rewritten image URLs on export).
 */
export default function renderMarkdown(text) {
    // Strip the leading triple-tab indentation the CMS editor inserts.
    const html = marked(text, { renderer: renderer }).replace(/^\t{3}/gm, '');

    // Only localize assets during a `sapper export` build. npm_lifecycle_script
    // is undefined when this module is not run through an npm/yarn script, so
    // default to '' to avoid a TypeError on startsWith.
    const lifecycleScript = process.env.npm_lifecycle_script || '';
    if (lifecycleScript.startsWith('sapper export') === false) {
      return html;
    }

    const dom = cheerio.load(html);
    const images = dom("img");
    const prefix = `https://cloud.squidex.io/api/assets/${process.env.SQUIDEX_PROJECT}/`;

    for (let i = 0; i < images.length; i++) {
      const img = images[i];
      const src = img.attribs.src;

      // Guard BEFORE calling string methods: src may be absent, and indexOf
      // on undefined would throw. Only Squidex-hosted assets are rewritten.
      if (!src || src.indexOf(prefix) === -1) {
        continue;
      }

      // Carry the requested resize dimensions into the download URL so the
      // saved file is the already-optimized rendition.
      let imageSpec = getImageSpec(img.attribs.width, img.attribs.height);
      if (imageSpec && src.indexOf('?') === -1) {
        // getImageSpec joins parameters with '&'; start a query string when
        // the source URL does not already have one.
        imageSpec = '?' + imageSpec.slice(1);
      }
      const assetUrl = `${src}${imageSpec}`;

      const relativePath = getRelativePath(src, prefix);
      const localPath = path.resolve(`./__sapper__/export${relativePath}`);
      fs.mkdirSync(path.dirname(localPath), { recursive: true });
      download(assetUrl, localPath);
      img.attribs.src = relativePath;
    }

    return dom.html();
}

/**
 * Maps a Squidex asset URL to a site-local path under /assets/.
 *
 * @param {string} src - The full asset URL, with or without a query string.
 * @param {string} prefix - The Squidex asset URL prefix to strip.
 * @returns {string} A relative path such as "/assets/<asset-id>".
 */
function getRelativePath(src, prefix) {
  // indexOf('?') is -1 when there is no query string; the previous
  // unconditional slice(0, queryPartIndex) would then chop the final
  // character of the URL. Only trim when a query string is present.
  const queryPartIndex = src.indexOf('?');
  const withoutQuery = queryPartIndex === -1 ? src : src.slice(0, queryPartIndex);
  return '/assets/' + withoutQuery.replace(prefix, "");
}

/**
 * Builds the query-string fragment for Squidex's image-resize API.
 * Each present dimension contributes an '&'-prefixed parameter; absent
 * (or falsy) dimensions are skipped.
 *
 * @param {string|number|undefined} width
 * @param {string|number|undefined} height
 * @returns {string} e.g. "&width=640&height=480", or "" when neither is set.
 */
function getImageSpec(width, height) {
  const parts = [];
  if (width) {
    parts.push(`&width=${width}`);
  }
  if (height) {
    parts.push(`&height=${height}`);
  }
  return parts.join('');
}

/**
 * Streams a remote asset to disk. Fire-and-forget: the export continues
 * while the transfer completes in the background.
 *
 * @param {string} uri - The asset URL to fetch.
 * @param {string} filename - Absolute local path to write to.
 */
function download(uri, filename){
  // Without an 'error' listener, a single failed request would emit an
  // unhandled 'error' event and crash the entire export run. Log and keep
  // going instead — one broken image should not abort the build.
  request(uri)
    .on('error', (err) => {
      console.error(`Failed to download ${uri}: ${err.message}`);
    })
    .pipe(fs.createWriteStream(filename));
};

Now all we need to do is change line 35 of src/routes/blog/_posts.js:

    const html = renderMarkdown(post.text);

Run this using yarn export and any images you have included in your Squidex posts will be downloaded and optimized automatically.