Merge pull request 'Updates ' (#184) from updates into main

Reviewed-on: https://codeberg.org/angestoepselt/homepage/pulls/184
This commit is contained in:
Yannik Rödel 2025-06-19 07:02:47 +02:00
commit 3a6f6e8e41
29 changed files with 2565 additions and 7346 deletions

9
.gitignore vendored
View file

@ -1,7 +1,4 @@
# node_modules may either be a usual folder or a symlink at the project
# root (created by the Nix update script).
node_modules/ node_modules/
/node_modules
# Eleventy build output # Eleventy build output
dist/ dist/
@ -14,11 +11,13 @@ _site/
# Environment # Environment
.env .env
.dev .dev
# `nix build` output
/result
# Private environments in the HTTP playground folder # Private environments in the HTTP playground folder
/playground/*.private.env.json /playground/*.private.env.json
/playground/*.csv /playground/*.csv
/httpd.dev.conf /httpd.dev.conf
# Obsolete Nix build system artifacts
/node_modules
/result

View file

@ -12,7 +12,6 @@ Du solltest nach dem Auschecken (oder in der Online-GUI) folgende Ordnerstruktur
- **assets/** Hilfsdateien für alle Auftritte. Dieser Ordner ist später unter `/assets` per HTTP verfügbar. Alles, was in diesem Ordner liegt, wird 1:1 kopiert. - **assets/** Hilfsdateien für alle Auftritte. Dieser Ordner ist später unter `/assets` per HTTP verfügbar. Alles, was in diesem Ordner liegt, wird 1:1 kopiert.
- **cgi-bin/** CGI-Skripte liegen hier. Diese sind zwar für alle Auftritte verfügbar, sind aber nicht überall genutzt. Diese Skripte werden auch nicht direkt von außen aufgerufen. Stattdessen schreibt der HTTP-Server relevante Anfragen um. - **cgi-bin/** CGI-Skripte liegen hier. Diese sind zwar für alle Auftritte verfügbar, sind aber nicht überall genutzt. Diese Skripte werden auch nicht direkt von außen aufgerufen. Stattdessen schreibt der HTTP-Server relevante Anfragen um.
- **includes/** Layout und Template-Dateien. Siehe dazu die [Eleventy-Dokumentation](https://www.11ty.dev/docs/config/#directory-for-includes). - **includes/** Layout und Template-Dateien. Siehe dazu die [Eleventy-Dokumentation](https://www.11ty.dev/docs/config/#directory-for-includes).
- **nix/** [Nix](https://nixos.org/)-spezifisches. Wahrscheinlich musst du hier nichts tun.
- **playground/** Das ist die Spielwiese. Wenn du irgendetwas ausprobieren möchtest, was grob zum Projekt passt, aber sonst keinen Platz in der Ordnerstruktur hat, leg es hier ab. - **playground/** Das ist die Spielwiese. Wenn du irgendetwas ausprobieren möchtest, was grob zum Projekt passt, aber sonst keinen Platz in der Ordnerstruktur hat, leg es hier ab.
- **sites/** Hier bekommt jeder Internetauftritt einen eigenen Unterordner. - **sites/** Hier bekommt jeder Internetauftritt einen eigenen Unterordner.
- **<name>/** - **<name>/**
@ -114,7 +113,7 @@ Natürlich ist das nicht ganz ideal, aber für unsere aktuelle Entwicklungsfrequ
Wenn du deine Änderungen gerne online stellen möchtest, musst du einen [Pull-Request](https://docs.codeberg.org/collaborating/pull-requests-and-git-flow/) einreichen. Wenn du deine Änderungen gerne online stellen möchtest, musst du einen [Pull-Request](https://docs.codeberg.org/collaborating/pull-requests-and-git-flow/) einreichen.
Ein Pull-Request bündelt deine Änderungen, sodass sie jemand vom Homepage-Team anschauen, genehmigen und einpflegen kann. Ein Pull-Request bündelt deine Änderungen, sodass sie jemand vom Homepage-Team anschauen, genehmigen und einpflegen kann.
Wenn du deine Änderungen über das Webinterface von Codeberg erstellst, wählst du beim Bearbeiten einer Datei im *Änderungen Commiten*-Dialog die Option "Einen neuen Branch für diesen Commit erstellen und einen Pull Request starten". Wenn du deine Änderungen über das Webinterface von Codeberg erstellst, wählst du beim Bearbeiten einer Datei im _Änderungen Commiten_-Dialog die Option "Einen neuen Branch für diesen Commit erstellen und einen Pull Request starten".
Wenn du anschließend in den Pull-Request noch eine weitere Änderung aufnehmen möchtest, suche [hier](https://codeberg.org/angestoepselt/homepage/branches) deinen entsprechenden Branch und wähle ab der zweiten Bearbeitung in dem erwähnten Dialog "Direkt in den Branch `...` einchecken". Wenn du anschließend in den Pull-Request noch eine weitere Änderung aufnehmen möchtest, suche [hier](https://codeberg.org/angestoepselt/homepage/branches) deinen entsprechenden Branch und wähle ab der zweiten Bearbeitung in dem erwähnten Dialog "Direkt in den Branch `...` einchecken".
Wenn du Mitglied des [Homepage](https://codeberg.org/org/angestoepselt/teams/homepage)-Teams auf Codeberg bist, kannst du ohne Forken direkt auf unserem Repository arbeiten. Wenn du Mitglied des [Homepage](https://codeberg.org/org/angestoepselt/teams/homepage)-Teams auf Codeberg bist, kannst du ohne Forken direkt auf unserem Repository arbeiten.

View file

@ -1,49 +1,44 @@
# #
# Static site build phase # Static site build phase
# #
FROM node:16 as build FROM docker.io/node:22 as build
ARG SITE ARG SITE
WORKDIR /build WORKDIR /build
COPY package.json package-lock.json /build/ COPY package.json package-lock.json .
RUN npm ci RUN --mount=type=cache,target=/root/.npm \
npm clean-install --omit=dev
COPY styles /build/styles/ COPY styles/ styles/
RUN npm run build:styles RUN npm run build:styles
COPY .eleventy* /build/ COPY eleventy.config.mjs .
COPY assets /build/assets/ COPY assets assets/
COPY includes /build/includes/ COPY includes includes/
COPY sites/${SITE} /build/sites/${SITE}/ COPY sites/${SITE} sites/${SITE}/
# These are symlinked from other sites: # These are symlinked from other sites:
COPY sites/angestoepselt/_images/home-banner.jpg /build/sites/angestoepselt/_images/ COPY sites/angestoepselt/_images/home-banner.jpg /build/sites/angestoepselt/_images/
RUN SITE=${SITE} npm run build:site
RUN SITE=${SITE} npm run build
# #
# Actual server container # Actual server container
# #
FROM python:3.10-alpine FROM docker.io/python:3.13-alpine
ARG SITE ARG SITE
# Install dependencies, see flake.nix for details. RUN --mount=type=cache,target=/root/.cache/pip \
RUN apk add --no-cache lighttpd && \ apk add --no-cache lighttpd && \
python -m pip install itsdangerous requests python -m pip install legacy-cgi itsdangerous requests
COPY --from=build /build/dist /www/ COPY --from=build /build/dist /www/
COPY cgi-bin /cgi-bin/ COPY cgi-bin /cgi-bin/
COPY sites/${SITE}/httpd.conf /httpd.conf COPY sites/${SITE}/httpd.conf /httpd.conf
COPY sites/${SITE}/_data/config.json /config.json COPY sites/${SITE}/_data/config.json /config.json
# Patch the lighttpd config file. These placeholders expect a Nix derivation
# path, so the Python binary used in the end is at @python@/bin/python. The
# values here make sure they are found correctly in the non-Nix container,
# even though they might not necessarily make sense directly.
RUN sed -i \ RUN sed -i \
-e "s,@mimetypes@,/etc/lighttpd/mime-types.conf,g" \ -e "s,@python@,/usr/local/bin/python,g" \
-e "s,@python@,/usr/local,g" \
-e "s,@site@,/www,g" \ -e "s,@site@,/www,g" \
-e "s,@cgibin@,/cgi-bin,g" \ -e "s,@cgibin@,/cgi-bin,g" \
/httpd.conf /httpd.conf

View file

@ -21,8 +21,8 @@ Falls du es eilig hast, hier sind die Grundlagen:
2. Das Projekt auschecken, dann `npm install` 2. Das Projekt auschecken, dann `npm install`
3. `npm run build:styles` 3. `npm run build:styles`
4. `SITE=angestoepselt npm run dev:site`<sup>1</sup> 4. `SITE=angestoepselt npm run dev:site`<sup>1</sup>
- Du solltest einen lokalen Testserver bekommen, der auf Änderungen reagiert. Formulare funktionieren hier nicht. - Du solltest einen lokalen Testserver bekommen, der auf Änderungen reagiert. Formulare funktionieren hier nicht.
- Im Ordner **dist/** liegen die fertigen Dateien für den Webserver. - Im Ordner **dist/** liegen die fertigen Dateien für den Webserver.
5. Änderungen in einem PR auf den `main`-Zweig einreichen 5. Änderungen in einem PR auf den `main`-Zweig einreichen
<small>1: Wähle für die Umgebungsvariable den Namen des Ordners unterhalb von **sites/** für die Seite, die du bearbeiten möchtest.</small> <small>1: Wähle für die Umgebungsvariable den Namen des Ordners unterhalb von **sites/** für die Seite, die du bearbeiten möchtest.</small>

View file

@ -2,7 +2,6 @@
import base64 import base64
import io import io
import cgi
import collections import collections
from collections.abc import Mapping from collections.abc import Mapping
import hmac import hmac
@ -13,6 +12,7 @@ import secrets
import json import json
from typing import Any, Optional, overload from typing import Any, Optional, overload
import cgi
import itsdangerous import itsdangerous
import requests import requests

View file

@ -10,12 +10,12 @@ services:
security_opt: security_opt:
- no-new-privileges:true - no-new-privileges:true
labels: labels:
- "traefik.enable=true" - 'traefik.enable=true'
- "traefik.docker.network=proxy" - 'traefik.docker.network=proxy'
- "traefik.http.routers.homepage-angestoepselt-${BRANCH:-main}-secure.rule=Host(`${ANGESTOEPSELT_DOMAIN}`,`www.${ANGESTOEPSELT_DOMAIN}`)" - 'traefik.http.routers.homepage-angestoepselt-${BRANCH:-main}-secure.rule=Host(`${ANGESTOEPSELT_DOMAIN}`,`www.${ANGESTOEPSELT_DOMAIN}`)'
- "traefik.http.routers.homepage-angestoepselt-${BRANCH:-main}-secure.entrypoints=websecure" - 'traefik.http.routers.homepage-angestoepselt-${BRANCH:-main}-secure.entrypoints=websecure'
- "traefik.http.routers.homepage-angestoepselt-${BRANCH:-main}-secure.tls.certresolver=letsencrypt" - 'traefik.http.routers.homepage-angestoepselt-${BRANCH:-main}-secure.tls.certresolver=letsencrypt'
- "traefik.http.routers.homepage-angestoepselt-${BRANCH:-main}.middlewares=redirect-www-to-non-www@file" - 'traefik.http.routers.homepage-angestoepselt-${BRANCH:-main}.middlewares=redirect-www-to-non-www@file'
networks: networks:
- proxy - proxy
@ -27,12 +27,12 @@ services:
security_opt: security_opt:
- no-new-privileges:true - no-new-privileges:true
labels: labels:
- "traefik.enable=true" - 'traefik.enable=true'
- "traefik.docker.network=proxy" - 'traefik.docker.network=proxy'
- "traefik.http.routers.homepage-coderdojo-${BRANCH:-main}-secure.rule=Host(`${CODERDOJO_DOMAIN}`,`www.${CODERDOJO_DOMAIN}`)" - 'traefik.http.routers.homepage-coderdojo-${BRANCH:-main}-secure.rule=Host(`${CODERDOJO_DOMAIN}`,`www.${CODERDOJO_DOMAIN}`)'
- "traefik.http.routers.homepage-coderdojo-${BRANCH:-main}-secure.entrypoints=websecure" - 'traefik.http.routers.homepage-coderdojo-${BRANCH:-main}-secure.entrypoints=websecure'
- "traefik.http.routers.homepage-coderdojo-${BRANCH:-main}-secure.tls.certresolver=letsencrypt" - 'traefik.http.routers.homepage-coderdojo-${BRANCH:-main}-secure.tls.certresolver=letsencrypt'
- "traefik.http.routers.homepage-coderdojo-${BRANCH:-main}.middlewares=redirect-www-to-non-www@file" - 'traefik.http.routers.homepage-coderdojo-${BRANCH:-main}.middlewares=redirect-www-to-non-www@file'
networks: networks:
- proxy - proxy

View file

@ -1,13 +1,13 @@
const fs = require('fs'); import fs from 'fs';
const { DateTime } = require('luxon'); import { DateTime } from 'luxon';
const pluginRss = require('@11ty/eleventy-plugin-rss'); import pluginRss from '@11ty/eleventy-plugin-rss';
const pluginSyntaxHighlight = require('@11ty/eleventy-plugin-syntaxhighlight'); import pluginSyntaxHighlight from '@11ty/eleventy-plugin-syntaxhighlight';
const pluginNavigation = require('@11ty/eleventy-navigation'); import pluginNavigation from '@11ty/eleventy-navigation';
const Image = require('@11ty/eleventy-img'); import Image from '@11ty/eleventy-img';
const markdownIt = require('markdown-it'); import markdownIt from 'markdown-it';
const markdownItAnchor = require('markdown-it-anchor'); import markdownItAnchor from 'markdown-it-anchor';
const markdownItAttrs = require('markdown-it-attrs'); import markdownItAttrs from 'markdown-it-attrs';
function hyphenize(input) { function hyphenize(input) {
return input return input
@ -16,14 +16,14 @@ function hyphenize(input) {
.toLowerCase(); .toLowerCase();
} }
module.exports = function (eleventyConfig) { const siteName = process.env.SITE;
const siteName = process.env.SITE; if (!siteName) {
if (!siteName) { throw new Error(
throw new Error( 'Cannot determine the name of the site to build. Make sure to set the SITE environment variable.'
'Cannot determine the name of the site to build. Make sure to set the SITE environment variable.' );
); }
}
export default function (eleventyConfig) {
eleventyConfig.addPlugin(pluginRss); eleventyConfig.addPlugin(pluginRss);
eleventyConfig.addPlugin(pluginSyntaxHighlight); eleventyConfig.addPlugin(pluginSyntaxHighlight);
eleventyConfig.addPlugin(pluginNavigation); eleventyConfig.addPlugin(pluginNavigation);
@ -196,25 +196,21 @@ ${
ui: false, ui: false,
ghostMode: false, ghostMode: false,
}); });
}
// export const config = {
// Other settings dir: {
// input: `sites/${siteName}`,
output: 'dist',
// These are all relative to the input directory so the paths get a little
// weird:
includes: '../../includes',
data: '_data',
},
return { templateFormats: ['md', 'njk', 'html', 'liquid'],
dir: {
input: `sites/${siteName}`,
output: 'dist',
// These are all relative to the input directory so the paths get a little
// weird:
includes: '../../includes',
data: '_data',
},
templateFormats: ['md', 'njk', 'html', 'liquid'], markdownTemplateEngine: 'njk',
htmlTemplateEngine: 'njk',
markdownTemplateEngine: 'njk', dataTemplateEngine: false,
htmlTemplateEngine: 'njk',
dataTemplateEngine: false,
};
}; };

60
flake.lock generated
View file

@ -1,60 +0,0 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1681202837,
"narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "cfacdce06f30d2b68473a46042957675eebb3401",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1683408522,
"narHash": "sha256-9kcPh6Uxo17a3kK3XCHhcWiV1Yu1kYj22RHiymUhMkU=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "897876e4c484f1e8f92009fd11b7d988a121a4e7",
"type": "github"
},
"original": {
"id": "nixpkgs",
"ref": "nixos-unstable",
"type": "indirect"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

View file

@ -1,70 +0,0 @@
{
description = "Angestöpselt Homepage";
inputs.nixpkgs.url = "nixpkgs/nixos-unstable";
inputs.flake-utils.url = "github:numtide/flake-utils";
outputs = {
self,
nixpkgs,
flake-utils,
}:
flake-utils.lib.eachDefaultSystem (system: let
pkgs = import nixpkgs {inherit system;};
nodejs = pkgs.nodejs-18_x;
nodePackages = import ./nix/default.nix {inherit pkgs system nodejs;};
nodeDependencies = nodePackages.nodeDependencies.override {
src = pkgs.runCommand "angestoepselt-homepage-package-json" {} ''
mkdir -p "$out"
cp ${./package.json} "$out/package.json"
cp ${./package-lock.json} "$out/package-lock.json"
'';
nativeBuildInputs = [pkgs.pkg-config];
buildInputs = [pkgs.vips pkgs.glib];
};
python = pkgs.python310.withPackages (ps:
with ps; [
itsdangerous
requests
]);
in rec {
packages = {
devEnv = pkgs.symlinkJoin {
name = "angestoepselt-homepage-dev";
paths = [
pkgs.lighttpd
nodejs
nodeDependencies
python
];
buildInputs = [pkgs.makeWrapper];
postBuild = ''
wrapProgram "$out/bin/node" \
--prefix PATH : "$out/lib/node_modules/.bin" \
--prefix NODE_PATH : "$out/lib/node_modules"
'';
shellHook = ''
echo ""
echo " To start editing content, run:"
echo ""
echo "npm run build:styles"
echo "npm run dev:site"
echo ""
echo " The site will be available under http://localhost:8080/ for"
echo " local development and rebuilds automatically when content"
echo " changes."
echo ""
'';
};
};
devShells.default = pkgs.stdenvNoCC.mkDerivation {
name = "angestoepselt-homepage-shell";
nativeBuildInputs = [packages.devEnv];
};
});
}

View file

@ -1,17 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-18_x"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}

View file

@ -1,689 +0,0 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.packageName}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
else {
packageObj.devDependencies = {};
}
replaceDependencies(packageObj.optionalDependencies);
replaceDependencies(packageObj.peerDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "reconstructpackagelock.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 2,
requires: true,
packages: {
"": {
name: packageObj.name,
version: packageObj.version,
license: packageObj.license,
bin: packageObj.bin,
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
}
},
dependencies: {}
};
function augmentPackageJSON(filePath, packages, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
packages[filePath] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
};
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, packages, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, packages, dependencies);
}
});
} else {
augmentPackageJSON(filePath, packages, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.packages, lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
# Script that links bins defined in package.json to the node_modules bin directory
# NPM does not do this for top-level packages itself anymore as of v7
linkBinsScript = writeTextFile {
name = "linkbins.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);
if(packageObj.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
if(typeof packageObj.bin == "object") {
Object.keys(packageObj.bin).forEach(function(exe) {
if(fs.existsSync(packageObj.bin[exe])) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin[exe]),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
else {
if(fs.existsSync(packageObj.bin)) {
console.log("linking bin '" + packageObj.bin + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin),
path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
);
}
else {
console.log("skipping non-existent bin '" + packageObj.bin + "'");
}
}
}
else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.directories.bin, exe),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
'';
};
# Generates the shell fragment that runs NPM inside an already-composed
# package directory (dependencies have been provided by Nix beforehand).
#
# Parameters:
#   packageName     - directory name of the package to enter (may be scoped, e.g. "@org/pkg")
#   bypassCache     - if true, run npm with --offline and optionally fix up lock files
#   reconstructLock - if true (and bypassCache), delete and regenerate package-lock.json
#   npmFlags        - extra flags passed verbatim to every npm invocation
#   production      - if true, pass --production to npm rebuild/install
#
# The fragment expects $pinpointDependenciesScriptPath to be set (via
# passAsFile) and honours the dontNpmInstall derivation attribute.
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
# With the cache bypassed npm can run fully offline; otherwise point the
# registry at a dummy host so any accidental download attempt fails loudly
# instead of escaping the sandbox.
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
runHook postRebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
fi
# Link executables defined in package.json
node ${linkBinsScript}
'';
# Builds and composes an NPM package including all its dependencies.
#
# Produces a derivation whose $out/lib/node_modules contains the package and
# its pinned dependency tree, with executables symlinked into $out/bin and
# man pages (if any) linked under $out/share/man.
#
# Parameters (all others are forwarded verbatim to mkDerivation via extraArgs):
#   name/packageName/version - derivation label and NPM package identity
#   dependencies             - dependency spec consumed by composePackage
#   production               - pass --production to npm
#   bypassCache/reconstructLock/npmFlags/dontNpmInstall - see prepareAndInvokeNPM
#   dontStrip                - defaults to true; stripping may break some packages
buildNodePackage =
{ name
, packageName
, version ? null
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
# Fixup all executables; iterate with a glob instead of parsing `ls`
# output, which breaks on names containing whitespace
for i in $out/bin/*
do
file="$(readlink -f "$i")"
chmod u+rwx "$file"
if isScript "$file"
then
sed -i 's/\r$//' "$file" # convert crlf to lf
fi
done
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p "$out/share/man/$(basename "$dir")"
for page in "$dir"/*
do
ln -s "$page" "$out/share/man/$(basename "$dir")"
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries).
#
# Unlike buildNodePackage this only deploys the dependency tree plus a fake
# package.json copied from `src`, so the result can be symlinked into a
# development checkout. The output layout is $out/lib/node_modules and
# $out/bin (a symlink to the deployed .bin folder).
#
# NOTE(review): extraArgs here strips only name/dependencies/buildInputs,
# whereas buildNodePackage also strips dontStrip/dontNpmInstall/unpackPhase/
# buildPhase. Those attributes are re-merged via `// extraArgs` with the same
# values, so this is harmless, but inconsistent with the sibling builder.
buildNodeDependencies =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
chmod 644 package-lock.json
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
# Builds a development shell.
#
# Deploys the dependency tree with buildNodeDependencies (passing `args`
# through unchanged) and produces a derivation containing $out/bin/shell, a
# wrapper that runs the shell hook and then drops into an interactive shell.
# The shellHook exports NODE_PATH and prepends the dependencies' bin folder
# to PATH, but only when the package actually has dependencies.
buildNodeShell =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
nodeDependencies = buildNodeDependencies args;
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation ({
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
# $shellHook in the heredoc is expanded by the builder shell at build time
# (from the shellHook attribute below); ${stdenv.shell} is Nix interpolation.
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
} // extraArgs);
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

File diff suppressed because it is too large Load diff

View file

@ -1,22 +0,0 @@
PROJECT_DIR=$(dirname "$(dirname "$0")")
NIX_DIR="$PROJECT_DIR/nix"
if [ -h "$PROJECT_DIR/node_modules" ]; then
rm node_modules
fi
npm install --package-lock-only
node2nix \
-i "$PROJECT_DIR/package.json" \
-l "$PROJECT_DIR/package-lock.json" \
-o "$NIX_DIR/node-packages.nix" \
-c "$NIX_DIR/default.nix" \
-e "$NIX_DIR/node-env.nix" \
--development \
--include-peer-dependencies
nix build -o "$PROJECT_DIR/.dev" ".#devEnv"
if [ ! -e "$PROJECT_DIR/node_modules" ]; then
cd "$PROJECT_DIR"; ln -s .dev/lib/node_modules .
fi

5829
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -10,18 +10,20 @@
"dev:styles": "sass --watch styles/:dist/assets/css/" "dev:styles": "sass --watch styles/:dist/assets/css/"
}, },
"license": "MIT", "license": "MIT",
"dependencies": {
"@11ty/eleventy": "^3.1.1",
"@11ty/eleventy-img": "^6.0.4",
"@11ty/eleventy-navigation": "^1.0.4",
"@11ty/eleventy-plugin-rss": "^2.0.4",
"@11ty/eleventy-plugin-syntaxhighlight": "^5.0.1",
"luxon": "^3.6.1",
"markdown-it": "^14.1.0",
"markdown-it-anchor": "^9.2.0",
"markdown-it-attrs": "^4.3.1",
"node-gyp-build": "^4.8.4",
"sass": "^1.89.2"
},
"devDependencies": { "devDependencies": {
"@11ty/eleventy": "^2.0.1", "prettier": "^3.5.3"
"@11ty/eleventy-img": "^3.1.0",
"@11ty/eleventy-navigation": "^0.3.5",
"@11ty/eleventy-plugin-rss": "^1.2.0",
"@11ty/eleventy-plugin-syntaxhighlight": "^5.0.0",
"luxon": "^3.3.0",
"markdown-it": "^13.0.1",
"markdown-it-anchor": "^8.6.7",
"markdown-it-attrs": "^4.1.6",
"node-gyp-build": "^4.6.0",
"prettier": "^2.8.8",
"sass": "^1.62.1"
} }
} }

View file

@ -1,25 +1,22 @@
{ {
"title": "Angestöpselt", "title": "Angestöpselt",
"url": "https://www.angestoepselt.de/", "url": "https://www.angestoepselt.de/",
"language": "de", "language": "de",
"description": "Verein für Digitalkompetenz", "description": "Verein für Digitalkompetenz",
"feed": { "feed": {
"subtitle": "Verein für Digitalkompetenz", "subtitle": "Verein für Digitalkompetenz",
"filename": "feed.xml", "filename": "feed.xml",
"path": "/feed/feed.xml", "path": "/feed/feed.xml",
"id": "https://www.angestoepselt.de/" "id": "https://www.angestoepselt.de/"
}, },
"jsonfeed": { "jsonfeed": {
"path": "/feed/feed.json", "path": "/feed/feed.json",
"url": "https://www.angestoepselt.de/feed/feed.json" "url": "https://www.angestoepselt.de/feed/feed.json"
}, },
"author": { "author": {
"name": "Angestöpselt e. V.", "name": "Angestöpselt e. V.",
"email": "info@angestoepselt.de", "email": "info@angestoepselt.de",
"url": "https://www.angestoepselt.de/", "url": "https://www.angestoepselt.de/",
"address": [ "address": ["Zeller Straße 29/31", "97082 Würzburg"]
"Zeller Straße 29/31", }
"97082 Würzburg"
]
}
} }

View file

@ -1,19 +1,19 @@
{ {
"name": "Angestöpselt e.V.", "name": "Angestöpselt e.V.",
"short_name": "Angestöpselt", "short_name": "Angestöpselt",
"icons": [ "icons": [
{ {
"src": "/android-chrome-192x192.png", "src": "/android-chrome-192x192.png",
"sizes": "192x192", "sizes": "192x192",
"type": "image/png" "type": "image/png"
}, },
{ {
"src": "/android-chrome-512x512.png", "src": "/android-chrome-512x512.png",
"sizes": "512x512", "sizes": "512x512",
"type": "image/png" "type": "image/png"
} }
], ],
"theme_color": "#50dcc7", "theme_color": "#50dcc7",
"background_color": "#50dcc7", "background_color": "#50dcc7",
"display": "standalone" "display": "standalone"
} }

View file

@ -10,7 +10,7 @@ eleventyExcludeFromCollections: true
{% set absoluteUrl %}{{ metadata.feed.path | url | absoluteUrl(metadata.url) }}{% endset %} {% set absoluteUrl %}{{ metadata.feed.path | url | absoluteUrl(metadata.url) }}{% endset %}
<link href="{{ absoluteUrl }}" rel="self"/> <link href="{{ absoluteUrl }}" rel="self"/>
<link href="{{ metadata.url }}"/> <link href="{{ metadata.url }}"/>
<updated>{{ collections.posts | rssLastUpdatedDate }}</updated> <updated>{{ collections.posts | getNewestCollectionItemDate | dateToRfc3339 }}</updated>
<id>{{ metadata.feed.id }}</id> <id>{{ metadata.feed.id }}</id>
<author> <author>
<name>{{ metadata.author.name }}</name> <name>{{ metadata.author.name }}</name>
@ -21,7 +21,7 @@ eleventyExcludeFromCollections: true
<entry> <entry>
<title>{{ post.data.title }}</title> <title>{{ post.data.title }}</title>
<link href="{{ absolutePostUrl }}"/> <link href="{{ absolutePostUrl }}"/>
<updated>{{ post.date | rssDate }}</updated> <updated>{{ post.date | dateToRfc3339 }}</updated>
<id>{{ absolutePostUrl }}</id> <id>{{ absolutePostUrl }}</id>
<content type="html">{{ post.templateContent | htmlToAbsoluteUrls(absolutePostUrl) }}</content> <content type="html">{{ post.templateContent | htmlToAbsoluteUrls(absolutePostUrl) }}</content>
</entry> </entry>

View file

@ -22,7 +22,7 @@ eleventyExcludeFromCollections: true
"url": "{{ absolutePostUrl }}", "url": "{{ absolutePostUrl }}",
"title": "{{ post.data.title }}", "title": "{{ post.data.title }}",
"content_html": {% if post.templateContent %}{{ post.templateContent | htmlToAbsoluteUrls(absolutePostUrl) | dump | safe }}{% else %}""{% endif %}, "content_html": {% if post.templateContent %}{{ post.templateContent | htmlToAbsoluteUrls(absolutePostUrl) | dump | safe }}{% else %}""{% endif %},
"date_published": "{{ post.date | rssDate }}" "date_published": "{{ post.date | dateToRfc3339 }}"
} }
{%- if not loop.last -%} {%- if not loop.last -%}
, ,

View file

@ -5,8 +5,6 @@ server.port = 80
# See here: https://redmine.lighttpd.net/boards/2/topics/8382 # See here: https://redmine.lighttpd.net/boards/2/topics/8382
accesslog.filename = "/dev/fd/3" accesslog.filename = "/dev/fd/3"
include "@mimetypes@"
server.document-root = "@site@" server.document-root = "@site@"
index-file.names = ( "index.html" ) index-file.names = ( "index.html" )
@ -51,7 +49,7 @@ $HTTP["url"] =~ "^/cgi-bin/" {
alias.url += ( "/cgi-bin" => "@cgibin@" ) alias.url += ( "/cgi-bin" => "@cgibin@" )
static-file.exclude-extensions = ( ".py" ) static-file.exclude-extensions = ( ".py" )
cgi.assign = ( ".py" => "@python@/bin/python" ) cgi.assign = ( ".py" => "@python@" )
cgi.execute-x-only = "enable" cgi.execute-x-only = "enable"
cgi.x-sendfile = "enable" cgi.x-sendfile = "enable"

View file

@ -1,15 +1,12 @@
{ {
"title": "CoderDojo Würzburg", "title": "CoderDojo Würzburg",
"url": "https://coderdojo-wue.de/", "url": "https://coderdojo-wue.de/",
"language": "de", "language": "de",
"description": "Kostenlose Programmier- und Techniktreffen für Kinder und Jugendliche im Raum Würzburg.", "description": "Kostenlose Programmier- und Techniktreffen für Kinder und Jugendliche im Raum Würzburg.",
"author": { "author": {
"name": "Angestöpselt e. V.", "name": "Angestöpselt e. V.",
"email": "kontakt@coderdojo-wue.de", "email": "kontakt@coderdojo-wue.de",
"url": "https://coderdojo-wue.de/", "url": "https://coderdojo-wue.de/",
"address": [ "address": ["Zeller Straße 29/31", "97082 Würzburg"]
"Zeller Straße 29/31", }
"97082 Würzburg"
]
}
} }

View file

@ -5,8 +5,6 @@ server.port = 80
# See here: https://redmine.lighttpd.net/boards/2/topics/8382 # See here: https://redmine.lighttpd.net/boards/2/topics/8382
accesslog.filename = "/dev/fd/3" accesslog.filename = "/dev/fd/3"
include "@mimetypes@"
server.document-root = "@site@" server.document-root = "@site@"
index-file.names = ( "index.html" ) index-file.names = ( "index.html" )
@ -29,7 +27,7 @@ $HTTP["url"] =~ "^/cgi-bin/" {
alias.url += ( "/cgi-bin" => "@cgibin@" ) alias.url += ( "/cgi-bin" => "@cgibin@" )
static-file.exclude-extensions = ( ".py" ) static-file.exclude-extensions = ( ".py" )
cgi.assign = ( ".py" => "@python@/bin/python" ) cgi.assign = ( ".py" => "@python@" )
cgi.execute-x-only = "enable" cgi.execute-x-only = "enable"
cgi.x-sendfile = "enable" cgi.x-sendfile = "enable"

View file

@ -37,7 +37,9 @@
color: inherit; color: inherit;
text-decoration: none; text-decoration: none;
@include colors.card-shadow; @include colors.card-shadow;
transition: motion.$subtle background-color, motion.$subtle transform; transition:
motion.$subtle background-color,
motion.$subtle transform;
> h3, > h3,
> div > h3 { > div > h3 {
@ -195,6 +197,9 @@
.sun { .sun {
$final-transformation: translateX(20%) translateY(10%) scale(3.4); $final-transformation: translateX(20%) translateY(10%) scale(3.4);
transform: $final-transformation;
animation: motion.$gentle 0s 1 normal backwards running action-icon-sun;
@keyframes action-icon-sun { @keyframes action-icon-sun {
0% { 0% {
transform: none; transform: none;
@ -206,15 +211,16 @@
transform: $final-transformation; transform: $final-transformation;
} }
} }
transform: $final-transformation;
animation: motion.$gentle 0s 1 normal backwards running action-icon-sun;
} }
.heart-left { .heart-left {
$final-transformation: translateX(-0.8rem) translateY(1.4rem) scale(1.5) $final-transformation: translateX(-0.8rem) translateY(1.4rem) scale(1.5)
rotate(-25deg); rotate(-25deg);
transform: $final-transformation;
animation: motion.$gentle 0s 1 normal backwards running
action-icon-heart-left;
@keyframes action-icon-heart-left { @keyframes action-icon-heart-left {
0% { 0% {
transform: none; transform: none;
@ -227,16 +233,16 @@
transform: $final-transformation; transform: $final-transformation;
} }
} }
transform: $final-transformation;
animation: motion.$gentle 0s 1 normal backwards running
action-icon-heart-left;
} }
.heart-right { .heart-right {
$final-transformation: translateX(1.4rem) translateY(-0.1rem) scale(1.6) $final-transformation: translateX(1.4rem) translateY(-0.1rem) scale(1.6)
rotate(15deg); rotate(15deg);
transform: $final-transformation;
animation: motion.$gentle 0s 1 normal backwards running
action-icon-heart-right;
@keyframes action-icon-heart-right { @keyframes action-icon-heart-right {
0% { 0% {
transform: none; transform: none;
@ -249,15 +255,14 @@
transform: $final-transformation; transform: $final-transformation;
} }
} }
transform: $final-transformation;
animation: motion.$gentle 0s 1 normal backwards running
action-icon-heart-right;
} }
.coin { .coin {
$final-transformation: scale(0.8); $final-transformation: scale(0.8);
transform: $final-transformation;
animation: motion.$gentle 0s 1 normal backwards running action-icon-coin;
@keyframes action-icon-coin { @keyframes action-icon-coin {
0% { 0% {
transform: none; transform: none;
@ -269,8 +274,5 @@
transform: $final-transformation; transform: $final-transformation;
} }
} }
transform: $final-transformation;
animation: motion.$gentle 0s 1 normal backwards running action-icon-coin;
} }
} }

View file

@ -1,3 +1,4 @@
@use 'sass:color';
@use 'sass:math'; @use 'sass:math';
@use '../lib/colors'; @use '../lib/colors';
@use '../lib/motion'; @use '../lib/motion';
@ -57,7 +58,9 @@
display: inline-block; display: inline-block;
line-height: 2.5; line-height: 2.5;
text-decoration: none; text-decoration: none;
transition: motion.$subtle background-color, motion.$subtle box-shadow; transition:
motion.$subtle background-color,
motion.$subtle box-shadow;
@keyframes form-choice-hover { @keyframes form-choice-hover {
0% { 0% {
@ -74,7 +77,7 @@
background-image: linear-gradient( background-image: linear-gradient(
-45deg, -45deg,
transparent 0%, transparent 0%,
#{transparentize(colors.$yellow-500, 0.6)} 50%, #{color.scale(colors.$yellow-500, $alpha: -60%)} 50%,
transparent 100% transparent 100%
); );
background-size: 200% 100%; background-size: 200% 100%;

View file

@ -143,7 +143,9 @@ blockquote {
text-decoration: none; text-decoration: none;
line-height: 3rem; line-height: 3rem;
border: 1px solid colors.$main-text; border: 1px solid colors.$main-text;
transition: font motion.$subtle, border-color motion.$subtle, transition:
font motion.$subtle,
border-color motion.$subtle,
color motion.$subtle; color motion.$subtle;
&:hover { &:hover {
@ -197,7 +199,9 @@ ul.link-grid {
text-align: center; text-align: center;
text-decoration: none; text-decoration: none;
color: colors.$gray-600; color: colors.$gray-600;
transition: color motion.$subtle, box-shadow motion.$subtle; transition:
color motion.$subtle,
box-shadow motion.$subtle;
> img { > img {
display: block; display: block;
@ -234,7 +238,9 @@ ul.link-grid {
padding-top: layout.$normal-gap; padding-top: layout.$normal-gap;
padding-bottom: layout.$normal-gap; padding-bottom: layout.$normal-gap;
text-decoration: none; text-decoration: none;
transition: background-color motion.$subtle, color motion.$subtle, transition:
background-color motion.$subtle,
color motion.$subtle,
box-shadow motion.$subtle; box-shadow motion.$subtle;
@media screen and (min-width: layout.$breakpoint) { @media screen and (min-width: layout.$breakpoint) {

View file

@ -41,7 +41,17 @@
.plug { .plug {
fill: colors.$yellow-500; fill: colors.$yellow-500;
transition: fill #{0.3 * motion.$prominent-duration} motion.$prominent-timing; transition: fill #{0.3 * motion.$prominent-duration}
motion.$prominent-timing;
}
animation: angestoepselt-logo-hover motion.$prominent forwards;
+ span {
transform-origin: 100% 50%;
transition: transform #{0.7 * motion.$prominent-duration}
motion.$prominent-timing #{0.3 * motion.$prominent-duration};
transform: scale(1.1);
} }
@keyframes angestoepselt-logo-hover { @keyframes angestoepselt-logo-hover {
@ -55,14 +65,6 @@
transform: translateX(-0.4rem); transform: translateX(-0.4rem);
} }
} }
animation: angestoepselt-logo-hover motion.$prominent forwards;
+ span {
transform-origin: 100% 50%;
transition: transform #{0.7 * motion.$prominent-duration} motion.$prominent-timing #{0.3 * motion.$prominent-duration};
transform: scale(1.1);
}
} }
} }

View file

@ -25,7 +25,9 @@
line-height: #{$stampSize - 2 * $lineWeight}; line-height: #{$stampSize - 2 * $lineWeight};
text-align: center; text-align: center;
background-color: colors.$gray-50; background-color: colors.$gray-50;
transition: border-color motion.$subtle, background-color motion.$subtle, transition:
border-color motion.$subtle,
background-color motion.$subtle,
color motion.$subtle; color motion.$subtle;
&.small { &.small {

View file

@ -45,24 +45,6 @@
--gradient-color-10: #{colors.$green-500}; --gradient-color-10: #{colors.$green-500};
} }
@keyframes finish-hero {
0% {
stroke-width: 3px;
}
10% {
stroke-width: 3px;
}
20% {
stroke-width: 5px;
}
60% {
stroke-width: 5px;
}
100% {
stroke-width: 3px;
}
}
display: block; display: block;
height: 15vmin; height: 15vmin;
margin: layout.$huge-gap auto; margin: layout.$huge-gap auto;
@ -74,6 +56,14 @@
> .stroke-gradient { > .stroke-gradient {
@for $i from 1 through 4 { @for $i from 1 through 4 {
> stop:nth-of-type(#{$i}) { > stop:nth-of-type(#{$i}) {
animation: motion.$prominent
0s
1
normal
both
running
finish-stroke-gradient-#{$i};
@keyframes finish-stroke-gradient-#{$i} { @keyframes finish-stroke-gradient-#{$i} {
0% { 0% {
stop-color: var(--dark-idle-color); stop-color: var(--dark-idle-color);
@ -104,19 +94,13 @@
stop-color: var(--dark-idle-color); stop-color: var(--dark-idle-color);
} }
} }
animation: motion.$prominent
0s
1
normal
both
running
finish-stroke-gradient-#{$i};
} }
} }
} }
> .cable { > .cable {
animation: motion.$prominent 0s 1 normal both running finish-hero-cable;
@keyframes finish-hero-cable { @keyframes finish-hero-cable {
0% { 0% {
transform: translateX(0.5rem); transform: translateX(0.5rem);
@ -131,11 +115,14 @@
transform: none; transform: none;
} }
} }
animation: motion.$prominent 0s 1 normal both running finish-hero-cable;
} }
> .plug { > .plug {
animation:
motion.$prominent 0s 1 normal both running finish-hero-plug,
motion.$gentle 0.7s 1 normal forwards running finish-hero-plug-transition,
motion.$background 1s infinite normal none running finish-hero-plug-idle;
@keyframes finish-hero-plug { @keyframes finish-hero-plug {
0% { 0% {
transform: translateX(-0.5rem); transform: translateX(-0.5rem);
@ -175,13 +162,11 @@
fill: var(--light-idle-color); fill: var(--light-idle-color);
} }
} }
animation: motion.$prominent 0s 1 normal both running finish-hero-plug,
motion.$gentle 0.7s 1 normal forwards running finish-hero-plug-transition,
motion.$background 1s infinite normal none running finish-hero-plug-idle;
} }
> .contacts { > .contacts {
animation: motion.$prominent 0s 1 normal both running finish-hero-contacts;
@keyframes finish-hero-contacts { @keyframes finish-hero-contacts {
0% { 0% {
transform: translateX(0rem); transform: translateX(0rem);
@ -196,7 +181,23 @@
transform: none; transform: none;
} }
} }
}
animation: motion.$prominent 0s 1 normal both running finish-hero-contacts; @keyframes finish-hero {
0% {
stroke-width: 3px;
}
10% {
stroke-width: 3px;
}
20% {
stroke-width: 5px;
}
60% {
stroke-width: 5px;
}
100% {
stroke-width: 3px;
}
} }
} }

View file

@ -1,3 +1,5 @@
@use 'sass:color';
$gray-900: #1d1d1d; $gray-900: #1d1d1d;
$gray-800: #212121; $gray-800: #212121;
$gray-600: #707070; $gray-600: #707070;
@ -38,8 +40,9 @@ $inverse-text: $gray-50;
} }
@mixin card-shadow($base-color: $gray-900) { @mixin card-shadow($base-color: $gray-900) {
box-shadow: 0.1rem 0.4rem 0.4rem #{transparentize($base-color, 0.9)}, box-shadow:
0.25rem 1rem 1rem #{transparentize($base-color, 0.9)}; 0.1rem 0.4rem 0.4rem #{color.scale($base-color, $alpha: -90%)},
0.25rem 1rem 1rem #{color.scale($base-color, $alpha: -90%)};
} }
@mixin coderdojo-theme { @mixin coderdojo-theme {