file_path stringlengths 3 280 | file_language stringclasses 66 values | content stringlengths 1 1.04M | repo_name stringlengths 5 92 | repo_stars int64 0 154k | repo_description stringlengths 0 402 | repo_primary_language stringclasses 108 values | developer_username stringlengths 1 25 | developer_name stringlengths 0 30 | developer_company stringlengths 0 82 |
|---|---|---|---|---|---|---|---|---|---|
internal/yaman/network/utils.go | Go | package network
import (
"net"
"strconv"
"strings"
)
// GetRandomPort returns a random (host) port if it finds one.
//
// It asks the kernel for a free port by listening on ":0", then extracts the
// assigned port from the listener's textual address.
func GetRandomPort() (int, error) {
	listener, err := net.Listen("tcp", ":0")
	if err != nil {
		return 0, err
	}
	defer listener.Close()

	// The address looks like "host:port" (e.g. "[::]:41234"); the port is
	// everything after the last colon.
	addr := listener.Addr().String()
	return strconv.Atoi(addr[strings.LastIndex(addr, ":")+1:])
}
| willdurand/containers | 9 | 📦 This is a repository with some code I wrote to learn more about containers. | Go | willdurand | William Durand | mozilla |
internal/yaman/network/utils_test.go | Go | package network
import "testing"
// TestGetRandomPort checks that GetRandomPort yields a usable port without
// error.
func TestGetRandomPort(t *testing.T) {
	p, err := GetRandomPort()
	if err != nil {
		t.Errorf("expected no error, got: %v", err)
	}
	// Port 0 means "let the kernel choose"; an allocated port is >= 1.
	if p < 1 {
		t.Errorf("expected strictly positive port number, got: %d", p)
	}
}
| willdurand/containers | 9 | 📦 This is a repository with some code I wrote to learn more about containers. | Go | willdurand | William Durand | mozilla |
internal/yaman/registry/http.go | Go | package registry
import (
"net/http"
)
// httpClient wraps an *http.Client together with a bearer token that is
// attached (as an Authorization header) to every request it makes.
type httpClient struct {
	client *http.Client
	token  string
}
// newHttpClientWithAuthToken builds an httpClient that authenticates every
// request with the given bearer token, using the shared default client.
func newHttpClientWithAuthToken(token string) httpClient {
	c := httpClient{
		client: http.DefaultClient,
		token:  token,
	}
	return c
}
// Get performs an authenticated HTTP GET request against url with the given
// extra headers.
func (c *httpClient) Get(url string, headers map[string]string) (resp *http.Response, err error) {
	return c.do("GET", url, headers)
}

// Head performs an authenticated HTTP HEAD request against url with the
// given extra headers.
func (c *httpClient) Head(url string, headers map[string]string) (resp *http.Response, err error) {
	return c.do("HEAD", url, headers)
}
// do builds an authenticated request with the provided method, url and extra
// headers, and executes it.
func (c *httpClient) do(method, url string, headers map[string]string) (resp *http.Response, err error) {
	req, err := http.NewRequest(method, url, nil)
	if err != nil {
		return nil, err
	}
	// The bearer token goes in first so that an entry in `headers` may still
	// override the Authorization header if a caller really needs to.
	req.Header.Add("Authorization", "Bearer "+c.token)
	for name, value := range headers {
		req.Header.Set(name, value)
	}
	return c.client.Do(req)
}
| willdurand/containers | 9 | 📦 This is a repository with some code I wrote to learn more about containers. | Go | willdurand | William Durand | mozilla |
internal/yaman/registry/pull.go | Go | package registry
import (
"errors"
"fmt"
"io"
"io/fs"
"os"
"strings"
"github.com/willdurand/containers/internal/yaman/image"
)
// PullPolicy describes when an image should be pulled from a registry.
type PullPolicy string

// PullOpts contains options for the pull operation.
type PullOpts struct {
	// Policy determines whether the image must be downloaded.
	Policy PullPolicy
	// Output, when non-nil, receives human-readable progress messages.
	Output io.Writer
}

const (
	// PullAlways means that we always pull the image.
	PullAlways PullPolicy = "always"
	// PullMissing means that we pull the image if it does not already exist.
	PullMissing PullPolicy = "missing"
	// PullNever means that we never pull the image.
	PullNever PullPolicy = "never"
)

// ErrInvalidPullPolicy is returned by ParsePullPolicy for unknown values.
var ErrInvalidPullPolicy = errors.New("invalid pull policy")
// Pull downloads and unpacks an image from a registry.
func Pull(img *image.Image, opts PullOpts) error {
	if opts.Policy == PullNever {
		return nil
	}

	st, err := os.Stat(img.BaseDir)
	if err != nil && !errors.Is(err, fs.ErrNotExist) {
		return err
	}
	if st != nil && st.IsDir() {
		// The image is already on disk.
		if opts.Policy == PullMissing {
			return nil
		}
		// Otherwise remove the local copy so we pull a fresh one.
		if err := os.RemoveAll(img.BaseDir); err != nil {
			return err
		}
	}

	// Supported registry endpoints, keyed by the hostname part of the image
	// name.
	knownRegistries := map[string]registryOpts{
		"docker.io": {
			AuthURL:      "https://auth.docker.io/token",
			Service:      "registry.docker.io",
			IndexBaseURL: "https://index.docker.io/v2",
		},
		"quay.io": {
			AuthURL:      "https://quay.io/v2/auth",
			Service:      "quay.io",
			IndexBaseURL: "https://quay.io/v2",
		},
	}
	rOpts, ok := knownRegistries[img.Hostname]
	if !ok {
		return fmt.Errorf("unsupported registry '%s'", img.Hostname)
	}
	return PullFromRegistry(img, opts, rOpts)
}
// ParsePullPolicy converts a user-supplied string into a PullPolicy. The
// comparison is case-insensitive; unknown values yield ErrInvalidPullPolicy.
func ParsePullPolicy(value string) (PullPolicy, error) {
	policies := map[string]PullPolicy{
		"always":  PullAlways,
		"missing": PullMissing,
		"never":   PullNever,
	}
	if policy, ok := policies[strings.ToLower(value)]; ok {
		return policy, nil
	}
	return "", ErrInvalidPullPolicy
}
| willdurand/containers | 9 | 📦 This is a repository with some code I wrote to learn more about containers. | Go | willdurand | William Durand | mozilla |
internal/yaman/registry/pull_test.go | Go | package registry
import (
"testing"
)
// TestParsePullPolicy exercises ParsePullPolicy with valid, empty and
// invalid values.
func TestParsePullPolicy(t *testing.T) {
	for _, tc := range []struct {
		value    string
		expected PullPolicy
		err      error
	}{
		{"always", PullAlways, nil},
		{"missing", PullMissing, nil},
		{"never", PullNever, nil},
		{"", "", ErrInvalidPullPolicy},
		{"invalid", "", ErrInvalidPullPolicy},
	} {
		policy, err := ParsePullPolicy(tc.value)
		if policy != tc.expected {
			t.Errorf("value: %s - got policy: %s, want: %s", tc.value, policy, tc.expected)
		}
		// %v (not %t, which is the boolean verb) formats error values.
		if err != tc.err {
			t.Errorf("value: %s - got error: %v, want: %v", tc.value, err, tc.err)
		}
	}
}
| willdurand/containers | 9 | 📦 This is a repository with some code I wrote to learn more about containers. | Go | willdurand | William Durand | mozilla |
internal/yaman/registry/registry.go | Go | package registry
import (
"compress/gzip"
"encoding/json"
"errors"
"fmt"
"net/http"
"os"
"path/filepath"
"time"
"github.com/artyom/untar"
"github.com/opencontainers/go-digest"
imagespec "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/sirupsen/logrus"
"github.com/willdurand/containers/internal/yaman/image"
)
// registryOpts describes how to reach a specific (Docker v2) registry.
type registryOpts struct {
	// AuthURL is the endpoint used to obtain a pull token.
	AuthURL string
	// Service is the service name passed to the auth endpoint.
	Service string
	// IndexBaseURL is the base URL of the registry index (v2 API).
	IndexBaseURL string
}

// token is the response returned by a registry's auth endpoint.
type token struct {
	Token       string    `json:"token"`
	AccessToken string    `json:"access_token"`
	ExpiresIn   int       `json:"expires_in"`
	IssuedAt    time.Time `json:"issued_at"`
}

// registryClient is an authenticated HTTP client bound to a registry index
// base URL.
type registryClient struct {
	httpClient
	BaseURL string
}
// GetManifest fetches the (Docker v2) image manifest for img.
//
// It first issues a HEAD request to discover the manifest digest and content
// type, then GETs the manifest by digest and decodes it. Non-200 responses
// are reported as errors instead of being decoded as if they were manifests.
func (c registryClient) GetManifest(img *image.Image) (*imagespec.Manifest, error) {
	const manifestMediaType = "application/vnd.docker.distribution.manifest.v2+json"

	resp, err := c.Head(
		fmt.Sprintf("%s/%s/manifests/%s", c.BaseURL, img.Name, img.Version),
		map[string]string{"Accept": manifestMediaType},
	)
	if err != nil {
		return nil, err
	}
	// The HEAD response carries no useful body; we only need its headers.
	resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status code %d for manifest HEAD request", resp.StatusCode)
	}
	manifestDigest := resp.Header.Get("docker-content-digest")
	if manifestDigest == "" {
		return nil, errors.New("missing manifest digest")
	}
	if manifestType := resp.Header.Get("content-type"); manifestType != manifestMediaType {
		return nil, fmt.Errorf("unsupported manifest type: %s", manifestType)
	}

	resp, err = c.Get(
		fmt.Sprintf("%s/%s/manifests/%s", c.BaseURL, img.Name, manifestDigest),
		map[string]string{"Accept": manifestMediaType},
	)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status code %d for manifest GET request", resp.StatusCode)
	}

	manifest := new(imagespec.Manifest)
	if err := json.NewDecoder(resp.Body).Decode(manifest); err != nil {
		return nil, err
	}
	return manifest, nil
}
// GetImage fetches and decodes the image configuration blob referenced by
// manifest.Config. Non-200 responses are reported as errors instead of being
// fed to the JSON decoder.
func (c registryClient) GetImage(img *image.Image, manifest *imagespec.Manifest) (*imagespec.Image, error) {
	resp, err := c.Get(
		fmt.Sprintf("%s/%s/blobs/%s", c.BaseURL, img.Name, manifest.Config.Digest),
		map[string]string{"Accept": manifest.Config.MediaType},
	)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status code %d for config blob request", resp.StatusCode)
	}

	config := new(imagespec.Image)
	if err := json.NewDecoder(resp.Body).Decode(config); err != nil {
		return nil, err
	}
	return config, nil
}
// DownloadAndUnpackLayer downloads a single image layer blob and unpacks it
// into a layer directory named after the layer's (uncompressed) diff ID.
func (c registryClient) DownloadAndUnpackLayer(img *image.Image, layer imagespec.Descriptor, diffID digest.Digest) error {
	resp, err := c.Get(
		fmt.Sprintf("%s/%s/blobs/%s", c.BaseURL, img.Name, layer.Digest),
		map[string]string{"Accept": layer.MediaType},
	)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	switch layer.MediaType {
	case "application/vnd.docker.image.rootfs.diff.tar.gzip":
		// The layer is a gzipped tarball: stream-decompress it and untar it
		// directly into the layer directory, without buffering the blob.
		r, err := gzip.NewReader(resp.Body)
		if err != nil {
			return err
		}
		defer r.Close()
		// Layers are keyed by diff ID (uncompressed digest), not blob digest.
		layerDir := filepath.Join(img.LayersDir(), diffID.Hex())
		if err := untar.Untar(r, layerDir); err != nil {
			return err
		}
	default:
		return fmt.Errorf("unsupported layer type '%s'", layer.MediaType)
	}
	logrus.WithFields(logrus.Fields{
		"image":  img.FQIN(),
		"digest": layer.Digest,
	}).Debug("unpacked layer")
	return nil
}
// PullFromRegistry pulls an image from the registry described by opts: it
// obtains a pull token, downloads the manifest and image configuration
// (persisting both to disk), then downloads and unpacks every layer.
func PullFromRegistry(img *image.Image, pullOpts PullOpts, opts registryOpts) error {
	logger := logrus.WithField("image", img.FQIN())

	if pullOpts.Output != nil {
		fmt.Fprintf(pullOpts.Output, "Pulling %s\n", img.FQIN())
	}

	if err := os.MkdirAll(img.BlobsDir(), 0o755); err != nil {
		return err
	}

	// Request an anonymous pull token for this repository.
	url := fmt.Sprintf(
		"%s?service=%s&scope=repository:%s:pull",
		opts.AuthURL,
		opts.Service,
		img.Name,
	)
	resp, err := http.Get(url)
	if err != nil {
		return err
	}
	var t token
	err = json.NewDecoder(resp.Body).Decode(&t)
	// Close the body unconditionally so the connection is not leaked when
	// decoding fails.
	resp.Body.Close()
	if err != nil {
		return err
	}
	logger.Debug("got authentication token")

	c := registryClient{newHttpClientWithAuthToken(t.Token), opts.IndexBaseURL}

	manifest, err := c.GetManifest(img)
	if err != nil {
		return err
	}
	if err := writeJSONFile(img.ManifestFilePath(), manifest); err != nil {
		return err
	}
	logger.Debug("wrote manifest.json")

	config, err := c.GetImage(img, manifest)
	if err != nil {
		return err
	}
	if err := writeJSONFile(img.ConfigFilePath(), config); err != nil {
		return err
	}
	logger.Debug("wrote blobs/config.json")

	// Unpack each layer; the diff IDs in the config are ordered like the
	// manifest layers.
	for index, layer := range manifest.Layers {
		diffID := config.RootFS.DiffIDs[index]
		if err := c.DownloadAndUnpackLayer(img, layer, diffID); err != nil {
			return err
		}
	}

	if pullOpts.Output != nil {
		fmt.Fprintf(pullOpts.Output, "Digest: %s\n", manifest.Config.Digest.String())
	}
	return nil
}

// writeJSONFile JSON-encodes v into a newly created file at path, making
// sure the file is closed — and the close error reported — in all cases.
// The original code never closed these files (fd leak, unreported errors).
func writeJSONFile(path string, v interface{}) error {
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	if err := json.NewEncoder(f).Encode(v); err != nil {
		f.Close()
		return err
	}
	return f.Close()
}
| willdurand/containers | 9 | 📦 This is a repository with some code I wrote to learn more about containers. | Go | willdurand | William Durand | mozilla |
internal/yaman/shim/shim.go | Go | package shim
import (
"bufio"
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"io/fs"
"io/ioutil"
"net"
"net/http"
"net/url"
"os"
"os/exec"
"path/filepath"
"regexp"
"strconv"
"strings"
"sync"
"syscall"
"time"
"github.com/sirupsen/logrus"
"github.com/willdurand/containers/internal/cli"
"github.com/willdurand/containers/internal/constants"
"github.com/willdurand/containers/internal/logs"
"github.com/willdurand/containers/internal/yacs"
"github.com/willdurand/containers/internal/yaman/container"
"golang.org/x/term"
)
const (
	// logFileName is the shim's own log file name.
	logFileName = "shim.log"
	// stateFileName is the persisted shim state (JSON) file name.
	stateFileName = "shim.json"
	// slirp4netnsPidFileName stores the PID of the slirp4netns process.
	slirp4netnsPidFileName = "slirp4netns.pid"
	// slirp4netnsApiSocketName is the slirp4netns API socket file name.
	slirp4netnsApiSocketName = "slirp4netns.sock"
)

// executableNotFound matches the error produced when the runtime cannot find
// the executable to run; it is used to map such failures to exit code 127.
var executableNotFound = regexp.MustCompile("exec: .+? no such file or directory")

// ShimOpts contains the options that can be passed to a shim.
type ShimOpts struct {
	// Runtime is the name of the OCI runtime binary to invoke.
	Runtime string
}

// Shim represents an instance of the `yacs` shim.
type Shim struct {
	// BaseDir is the directory holding the shim's files.
	BaseDir string
	// Container is the container managed by this shim.
	Container *container.Container
	// Opts are the options the shim was created with.
	Opts ShimOpts
	// SocketPath is the unix socket used to reach the shim's HTTP API; it is
	// cleared when the shim is terminated.
	SocketPath string
	// State holds a copy of the final shim state after termination.
	State *yacs.YacsState
	// httpClient is lazily created by getHttpClient.
	httpClient *http.Client
}

// defaultShimOpts holds the defaults applied by New.
var defaultShimOpts = ShimOpts{
	Runtime: "yacr",
}
// New creates a new shim instance for a given container.
func New(container *container.Container, opts ShimOpts) *Shim {
	// Start from the defaults and only override the runtime when one was
	// explicitly provided.
	shimOpts := defaultShimOpts
	if opts.Runtime != "" {
		shimOpts.Runtime = opts.Runtime
	}
	return &Shim{
		BaseDir:   container.BaseDir,
		Container: container,
		Opts:      shimOpts,
	}
}
// Load attempts to load a shim configuration from disk. It returns a new shim
// instance when it succeeds or an error when there is a problem.
func Load(rootDir, id string) (*Shim, error) {
	containerDir := filepath.Join(container.GetBaseDir(rootDir), id)
	if _, err := os.Stat(containerDir); err != nil {
		return nil, fmt.Errorf("container '%s' does not exist", id)
	}

	data, err := os.ReadFile(filepath.Join(containerDir, stateFileName))
	if err != nil {
		return nil, err
	}

	var shim Shim
	if err := json.Unmarshal(data, &shim); err != nil {
		logrus.WithError(err).Warn("failed to load shim")
		return nil, err
	}

	// Refresh the container so its state matches what is on disk right now.
	if err := shim.Container.Refresh(); err != nil {
		return nil, err
	}
	return &shim, nil
}
// Create starts a shim process, which will also create a container by invoking
// an OCI runtime.
func (s *Shim) Create(rootDir string) error {
	// If anything below fails before the container reaches the "created"
	// state, undo the partial work (unmount rootfs, terminate the shim).
	defer func() {
		if !s.Container.IsCreated() {
			emptyState := new(yacs.YacsState)
			if err := s.cleanUp(emptyState); err != nil {
				logrus.WithError(err).Info("failed to clean-up shim")
			}
		}
	}()
	if err := s.Container.Mount(); err != nil {
		return err
	}
	// Look up the path to the `yacs` shim binary.
	yacs, err := exec.LookPath("yacs")
	if err != nil {
		return err
	}
	// Save the shim's state in case we need to load it in hooks.
	if err := s.save(); err != nil {
		return err
	}
	// The current binary is reused as the shim's exit command (see args below).
	self, err := os.Executable()
	if err != nil {
		return err
	}
	// Prepare a list of arguments for `yacs`.
	args := []string{
		// Specify the base directory so that we keep most of the files in the same
		// "container directory", which should also help when we need to clean-up
		// everything because of an error.
		"--base-dir", filepath.Join(s.BaseDir, "shim"),
		"--log", s.logFilePath(),
		// With JSON logs, we can parse the error message in case of an error.
		"--log-format", "json",
		"--bundle", s.Container.BaseDir,
		"--container-id", s.Container.ID,
		"--container-log-file", s.Container.LogFilePath,
		"--stdio-dir", s.stdioDir(),
		"--runtime", s.Opts.Runtime,
		"--exit-command", self,
		"--exit-command-arg", "--root",
		"--exit-command-arg", rootDir,
		"--exit-command-arg", "container",
		"--exit-command-arg", "cleanup",
		"--exit-command-arg", s.Container.ID,
	}
	if logrus.IsLevelEnabled(logrus.DebugLevel) {
		args = append(args, []string{
			// For the exit command...
			"--exit-command-arg", "--debug",
			// ...and for the shim.
			"--debug",
		}...)
	}
	// Create the command to execute to start the shim.
	shimCmd := exec.Command(yacs, args...)
	logrus.WithFields(logrus.Fields{
		"command": shimCmd.String(),
	}).Debug("start shim")
	data, err := shimCmd.Output()
	if err != nil {
		err = logs.GetBetterError(s.logFilePath(), err)
		// Map "executable not found" to exit code 127, like a shell would.
		if executableNotFound.MatchString(err.Error()) {
			return cli.ExitCodeError{Message: err.Error(), ExitCode: 127}
		}
		return err
	}
	// When `yacs` starts, it should print a unix socket path to the standard
	// output so that we can communicate with it via a HTTP API.
	s.SocketPath = strings.TrimSpace(string(data))
	s.Container.CreatedAt = time.Now()
	return s.save()
}
// GetState queries the shim to retrieve its state and returns it.
func (s *Shim) GetState() (*yacs.YacsState, error) {
	// When a shim is terminated, the `State` property should be non-nil and
	// that's what we return instead of attempting to communicate with the no
	// longer existing shim.
	if s.State != nil {
		return s.State, nil
	}

	client, err := s.getHttpClient()
	if err != nil {
		return nil, err
	}
	resp, err := client.Get("http://shim/")
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	data, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	var state yacs.YacsState
	if err := json.Unmarshal(data, &state); err != nil {
		// Include the raw payload to make debugging easier.
		return nil, fmt.Errorf("failed to read state: %s", data)
	}
	return &state, nil
}
// Terminate stops the shim if the container is stopped, otherwise an error is
// returned.
//
// Stopping the shim is performed in two steps: (1) delete the container, and
// (2) run some clean-up tasks like unmounting the root filesystem, stopping
// slirp4netns and terminating the shim process.
//
// Once this is done, we persist the final shim state on disk so that other
// Yaman commands can read and display information until the container is
// actually deleted. This is one of the main differences with the `Destroy()`
// method: the shim state is still available.
func (s *Shim) Terminate() error {
	// We need to read the state first because we won't be able to read it once
	// the container has been deleted (by the OCI runtime).
	state, err := s.GetState()
	if err != nil {
		return err
	}
	if err = s.DeleteContainer(); err != nil {
		return err
	}
	// Persist `state` as the final snapshot while tearing everything down.
	return s.cleanUp(state)
}
// Delete deletes a container that is not running, otherwise an error will be
// returned. If the container is not running and not stopped, the shim is
// terminated first.
//
// All the container files should be deleted as a result of a call to this
// method and the container will not exist anymore.
func (s *Shim) Delete() error {
	state, err := s.GetState()
	if err != nil {
		return err
	}

	status := state.State.Status
	if status == constants.StateRunning {
		return fmt.Errorf("container '%s' is %s", s.Container.ID, status)
	}
	if status != constants.StateStopped {
		// Neither running nor stopped (e.g. only created): tear down the shim
		// before removing the container files.
		if err := s.cleanUp(state); err != nil {
			return err
		}
	}
	return s.Container.Delete()
}
// CopyLogs copies all the container logs stored by the shim to the provided
// writers. Note that this method does NOT use the shim's HTTP API. It reads the
// container log file directly.
func (s *Shim) CopyLogs(stdout io.Writer, stderr io.Writer, withTimestamps bool) error {
	file, err := os.Open(s.Container.LogFilePath)
	if err != nil {
		if errors.Is(err, fs.ErrNotExist) {
			// No log file yet means no logs; that is not an error.
			return nil
		}
		return err
	}
	defer file.Close()
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		// Each line is a small JSON document with keys "m" (message),
		// "s" (stream name) and "t" (timestamp).
		var l map[string]string
		if err := json.Unmarshal(scanner.Bytes(), &l); err != nil {
			return err
		}
		data := append([]byte(l["m"]), '\n')
		if withTimestamps {
			if t, err := time.Parse(time.RFC3339, l["t"]); err == nil {
				data = append(
					// TODO: I wanted to use time.RFC3339Nano but the length isn't fixed
					// and that breaks the alignment when rendered.
					[]byte(t.Local().Format(time.RFC3339)),
					append([]byte{' ', '-', ' '}, data...)...,
				)
			}
		}
		if l["s"] == "stderr" {
			stderr.Write(data)
		} else {
			stdout.Write(data)
		}
	}
	// The loop above exits silently on read errors; surface them here.
	return scanner.Err()
}
// StartContainer tells the shim to start a container that was previously
// created.
func (s *Shim) StartContainer() error {
	state, err := s.GetState()
	if err != nil {
		return err
	}
	if status := state.State.Status; status != constants.StateCreated {
		return fmt.Errorf("container '%s' is %s", s.Container.ID, status)
	}

	err = s.sendCommand(url.Values{"cmd": []string{"start"}})
	if err != nil && executableNotFound.MatchString(err.Error()) {
		// Strip the "start: " prefix added by `sendCommand` and map the
		// failure to exit code 127, like a shell would.
		return cli.ExitCodeError{
			Message:  strings.TrimSuffix(err.Error()[7:], "\n"),
			ExitCode: 127,
		}
	}
	return err
}
// StopContainer tells the shim to stop the container by sending a SIGTERM
// signal first and a SIGKILL if the first signal didn't stop the container.
func (s *Shim) StopContainer() error {
	if err := s.sendCommand(url.Values{
		"cmd":    []string{"kill"},
		"signal": []string{"SIGTERM"},
	}); err != nil {
		return err
	}
	// Wait a second before reading the state again.
	time.Sleep(1 * time.Second)
	state, err := s.GetState()
	if err != nil {
		// After a second, it is possible that the container has exited properly
		// after SIGTERM and we ran 'container cleanup'; in that case the shim is
		// gone, reading the state fails, and we treat it as a success.
		return nil
	}
	if state.State.Status != constants.StateStopped {
		// The container survived SIGTERM: escalate to SIGKILL.
		logrus.WithField("id", s.Container.ID).Debug("SIGTERM failed, sending SIGKILL")
		if err := s.sendCommand(url.Values{
			"cmd":    []string{"kill"},
			"signal": []string{"SIGKILL"},
		}); err != nil {
			return err
		}
		// Give some more time to the exit command.
		time.Sleep(1 * time.Second)
	}
	return nil
}
// DeleteContainer tells the shim to delete the container. The container must
// already be stopped.
func (s *Shim) DeleteContainer() error {
	state, err := s.GetState()
	if err != nil {
		return err
	}
	if status := state.State.Status; status != constants.StateStopped {
		return fmt.Errorf("container '%s' is %s", s.Container.ID, status)
	}
	return s.sendCommand(url.Values{"cmd": []string{"delete"}})
}
// OpenStreams opens and returns the stdio streams (stdin, stdout, stderr) of
// the container. On failure, any stream that was already opened is closed so
// that no file descriptor leaks.
func (s *Shim) OpenStreams() (*os.File, *os.File, *os.File, error) {
	stdin, err := os.OpenFile(filepath.Join(s.stdioDir(), "0"), os.O_WRONLY, 0)
	if err != nil {
		return nil, nil, nil, err
	}
	stdout, err := os.Open(filepath.Join(s.stdioDir(), "1"))
	if err != nil {
		stdin.Close()
		return nil, nil, nil, err
	}
	stderr, err := os.Open(filepath.Join(s.stdioDir(), "2"))
	if err != nil {
		stdin.Close()
		stdout.Close()
		return nil, nil, nil, err
	}
	return stdin, stdout, stderr, nil
}
// Attach attaches the provided Input/Output streams to the container.
func (s *Shim) Attach(attachStdin, attachStdout, attachStderr bool) error {
	stdin, stdout, stderr, err := s.OpenStreams()
	if err != nil {
		return err
	}
	defer stdin.Close()
	defer stdout.Close()
	defer stderr.Close()
	// In interactive mode, we keep `stdin` open, otherwise we close it
	// immediately and we only care about `stdout` and `stderr`.
	if attachStdin {
		go func() {
			io.Copy(stdin, os.Stdin)
			if !s.Container.Opts.Tty {
				// HACK: this isn't how we should handle EOF on stdin but there is an
				// issue with using the named pipes directly. Closing `stdin` here
				// isn't enough because the shim keeps it open (on purpose...). We need
				// "something" to close here so that the shim can close the named pipe
				// itself but sending the string below isn't what we should be doing...
				stdin.Write([]byte("\nTHIS_IS_NOT_HOW_WE_SHOULD_CLOSE_A_PIPE\n"))
			}
		}()
	} else {
		stdin.Close()
	}
	if s.Container.Opts.Tty {
		// TODO: maybe handle the case where we want to detach from the container
		// without killing it. Docker has a special key sequence for detaching a
		// container.
		// We force the current terminal to switch to "raw mode" because we don't
		// want it to mess with the PTY set up by the container itself.
		oldState, err := term.MakeRaw(int(os.Stdin.Fd()))
		if err != nil {
			return err
		}
		defer term.Restore(int(os.Stdin.Fd()), oldState)
		go io.Copy(stdin, os.Stdin)
		// Block on the stream coming from the container so that when it exits, we
		// can also exit this command.
		io.Copy(os.Stdout, stdout)
	} else {
		// TODO: proxy all received signals to the container process and maybe add
		// an option like Docker's `--sig-proxy` one.
		var wg sync.WaitGroup
		// We copy the data from the container to the appropriate streams as long
		// as we can. When the container process exits, the shim should close the
		// streams on its end, which should allow `copyStd()` to complete.
		if attachStdout {
			wg.Add(1)
			go copyStd(stdout, os.Stdout, &wg)
		}
		if attachStderr {
			wg.Add(1)
			go copyStd(stderr, os.Stderr, &wg)
		}
		wg.Wait()
	}
	return nil
}
// Slirp4netnsPidFilePath returns the path to the file where the slirp4netns
// process ID should be written when it is started.
func (s *Shim) Slirp4netnsPidFilePath() string {
	return filepath.Join(s.BaseDir, slirp4netnsPidFileName)
}

// Slirp4netnsApiSocketPath returns the path to the API socket used to
// communicate with the slirp4netns process.
func (s *Shim) Slirp4netnsApiSocketPath() string {
	return filepath.Join(s.BaseDir, slirp4netnsApiSocketName)
}
// Recreate stops a container if it is running and then re-create a new
// container. If the container is already stopped, we only re-create the
// container.
func (s *Shim) Recreate(rootDir string) error {
	state, err := s.GetState()
	if err != nil {
		return err
	}
	// When the container has only been created, we can early return because
	// there is no need to recreate it.
	if state.State.Status == constants.StateCreated {
		return nil
	}
	// Backup the remove option because we might have to override it if the
	// container is not stopped and we have to stop it.
	remove := s.Container.Opts.Remove
	if state.State.Status != constants.StateStopped {
		// We need to set this option to `false` unconditionally because we
		// don't want to auto-remove the container when we stop it since we want
		// to restart it.
		s.Container.Opts.Remove = false
		// Persist the override before stopping so the exit command sees it.
		if err := s.save(); err != nil {
			return err
		}
		if err := s.StopContainer(); err != nil {
			return err
		}
	}
	// Reset shim state
	s.State = nil
	// Restore remove option
	s.Container.Opts.Remove = remove
	// Reset container state
	s.Container.ExitedAt = time.Time{}
	if err := s.save(); err != nil {
		return err
	}
	// At this point the container has been stopped: the shim process is gone
	// and the rootfs has been unmounted. We should create a new container and
	// its shim process.
	return s.Create(rootDir)
}
// cleanUp tears down everything the shim set up for the container: it
// unmounts the root filesystem, terminates slirp4netns (when its PID file is
// present), asks the shim process to exit and finally persists the provided
// state to disk as the last known state.
func (s *Shim) cleanUp(state *yacs.YacsState) error {
	if err := s.Container.Unmount(); err != nil {
		return err
	}
	// The PID file only exists when slirp4netns was started for this
	// container; best-effort terminate it and remove the file.
	if _, err := os.Stat(s.Slirp4netnsPidFilePath()); err == nil {
		if data, err := os.ReadFile(s.Slirp4netnsPidFilePath()); err == nil {
			if slirpPid, err := strconv.Atoi(string(bytes.TrimSpace(data))); err == nil {
				logrus.WithField("pid", slirpPid).Debug("terminating slirp4netns")
				if err := syscall.Kill(slirpPid, syscall.SIGTERM); err != nil {
					logrus.WithError(err).Debug("failed to terminate slirp4netns")
				}
			}
		}
		if err := os.Remove(s.Slirp4netnsPidFilePath()); err != nil {
			logrus.WithError(err).Debug("failed to delete slirp4netns pid file")
		}
	}
	// Best-effort removal: the socket may legitimately not exist.
	if err := os.Remove(s.Slirp4netnsApiSocketPath()); err != nil {
		logrus.WithError(err).Debug("failed to delete slirp4netns socket file")
	}
	// Terminate the shim process by sending a DELETE request.
	req, err := http.NewRequest(http.MethodDelete, "http://shim/", nil)
	if err != nil {
		return err
	}
	c, err := s.getHttpClient()
	if err != nil {
		return err
	}
	resp, err := c.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	// Let's persist a copy of the shim state (before it got terminated) on disk.
	s.State = state
	s.SocketPath = ""
	s.Container.ExitedAt = time.Now()
	return s.save()
}
// sendCommand POSTs a command (and optional arguments) to the shim's HTTP
// API. On a non-2xx response, the response body is wrapped in the returned
// error, prefixed with the command name (callers such as StartContainer rely
// on that "cmd: " prefix).
func (s *Shim) sendCommand(values url.Values) error {
	c, err := s.getHttpClient()
	if err != nil {
		return err
	}
	resp, err := c.PostForm("http://shim/", values)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	// Anything outside [200, 300) is an error. The previous check used
	// `> 300`, which incorrectly treated HTTP 300 as a success.
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		data, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			return err
		}
		return fmt.Errorf("%s: %s", values.Get("cmd"), data)
	}
	return nil
}
// getHttpClient lazily creates (and then reuses) an HTTP client that talks
// to the shim over its unix socket. It fails when the shim has no socket,
// i.e. when the container is not running.
func (s *Shim) getHttpClient() (*http.Client, error) {
	if s.SocketPath == "" {
		return nil, fmt.Errorf("container '%s' is not running", s.Container.ID)
	}
	if s.httpClient != nil {
		return s.httpClient, nil
	}
	// Every request is dialed to the shim's unix socket, whatever the
	// host/port in the URL says.
	dial := func(_ context.Context, _, _ string) (net.Conn, error) {
		return net.Dial("unix", s.SocketPath)
	}
	s.httpClient = &http.Client{
		Transport: &http.Transport{DialContext: dial},
	}
	return s.httpClient, nil
}
// save persists the state of the shim to disk as JSON.
func (s *Shim) save() error {
	data, err := json.Marshal(s)
	if err != nil {
		return err
	}
	// os.WriteFile replaces the deprecated ioutil.WriteFile (same behavior).
	return os.WriteFile(s.stateFilePath(), data, 0o644)
}
// logFilePath returns the path to the shim's own log file.
func (s *Shim) logFilePath() string {
	return filepath.Join(s.BaseDir, logFileName)
}

// stateFilePath returns the path to the persisted shim state (JSON) file.
func (s *Shim) stateFilePath() string {
	return filepath.Join(s.BaseDir, stateFileName)
}

// stdioDir returns the directory holding the container's stdio named pipes
// (files "0", "1" and "2", see OpenStreams).
func (s *Shim) stdioDir() string {
	return s.BaseDir
}
| willdurand/containers | 9 | 📦 This is a repository with some code I wrote to learn more about containers. | Go | willdurand | William Durand | mozilla |
internal/yaman/shim/utils.go | Go | package shim
import (
"bufio"
"fmt"
"io"
"os"
"sync"
)
func copyStd(s *os.File, w io.Writer, wg *sync.WaitGroup) {
defer wg.Done()
reader := bufio.NewReader(s)
for {
line, err := reader.ReadString('\n')
if err != nil {
break
}
fmt.Fprint(w, line)
}
}
| willdurand/containers | 9 | 📦 This is a repository with some code I wrote to learn more about containers. | Go | willdurand | William Durand | mozilla |
tests/integration/base_helpers.bash | Shell | #!/usr/bin/env bash
load "../helpers/bats-support/load"
load "../helpers/bats-assert/load"
bats_require_minimum_version 1.5.0 | willdurand/containers | 9 | 📦 This is a repository with some code I wrote to learn more about containers. | Go | willdurand | William Durand | mozilla |
tests/integration/yacr/helpers.bash | Shell | #!/usr/bin/env bash
load '../base_helpers'
# run_yacr invokes the `yacr` binary under bats' `run`, capturing its exit
# status and output for later assertions.
function run_yacr() {
    run yacr "$@"
}
| willdurand/containers | 9 | 📦 This is a repository with some code I wrote to learn more about containers. | Go | willdurand | William Durand | mozilla |
tests/integration/yacs/helpers.bash | Shell | #!/usr/bin/env bash
load '../base_helpers'
# run_yacs invokes the `yacs` binary under bats' `run`, capturing its exit
# status and output for later assertions.
function run_yacs() {
    run yacs "$@"
}

# get_state queries a shim's HTTP API over the unix socket given as $1 and
# echoes the returned (JSON) state document.
function get_state() {
    local sock="$1"
    run curl -s --unix-socket "$sock" http://shim/
    assert_success
    echo "$output"
}
tests/integration/yaman/helpers.bash | Shell | #!/usr/bin/env bash
load '../base_helpers'
TIMEOUT=30s
DOCKER_ALPINE=docker.io/library/alpine
DOCKER_HELLO_WORLD=docker.io/willdurand/hello-world
DOCKER_REDIS=docker.io/library/redis
QUAY_ALPINE=quay.io/aptible/alpine
function run_yaman() {
run timeout --foreground "$TIMEOUT" yaman "$@"
}
function run_yaman_and_get_cid() {
local cid=""
run --separate-stderr timeout --foreground "$TIMEOUT" yaman "$@"
assert_success
cid="$output"
echo "$cid"
}
function random_string() {
local length=${1:-10}
head /dev/urandom | tr -dc A-Z0-9 | head -c"$length"
}
# Print the JSON description of a container, failing the test when the
# `inspect` command itself fails.
# $@: container ID (plus any extra arguments forwarded to yaman).
function inspect() {
    # Note: the previous version declared `local cid="$1"` but never used it;
    # all arguments are forwarded as-is.
    run yaman container inspect "$@"
    assert_success
    echo "$output"
}
| willdurand/containers | 9 | 📦 This is a repository with some code I wrote to learn more about containers. | Go | willdurand | William Durand | mozilla |
docs/10.bootstrap.js | JavaScript | "use strict";(self.webpackChunkxpidump_webapp=self.webpackChunkxpidump_webapp||[]).push([[10],{194:(n,t,e)=>{e.a(n,(async(n,r)=>{try{e.d(t,{bi:()=>_.bi});var i=e(238),_=e(537),o=n([i]);i=(o.then?(await o)():o)[0],(0,_.oT)(i),r()}catch(n){r(n)}}))},537:(n,t,e)=>{let r;function i(n){r=n}e.d(t,{CF:()=>S,G2:()=>O,Je:()=>E,Kx:()=>$,Or:()=>F,Rx:()=>j,bi:()=>x,fY:()=>A,h4:()=>C,hd:()=>I,m_:()=>v,oT:()=>i,pT:()=>T,ug:()=>k}),n=e.hmd(n);const _=new Array(128).fill(void 0);function o(n){return _[n]}_.push(void 0,null,!0,!1);let s=_.length;function c(n){const t=o(n);return function(n){n<132||(_[n]=s,s=n)}(n),t}function a(n){s===_.length&&_.push(_.length+1);const t=s;return s=_[t],_[t]=n,t}let g=new("undefined"==typeof TextDecoder?(0,n.require)("util").TextDecoder:TextDecoder)("utf-8",{ignoreBOM:!0,fatal:!0});g.decode();let d=null;function u(){return null!==d&&0!==d.byteLength||(d=new Uint8Array(r.memory.buffer)),d}function f(n,t){return n>>>=0,g.decode(u().subarray(n,n+t))}function l(n){const t=typeof n;if("number"==t||"boolean"==t||null==n)return`${n}`;if("string"==t)return`"${n}"`;if("symbol"==t){const t=n.description;return null==t?"Symbol":`Symbol(${t})`}if("function"==t){const t=n.name;return"string"==typeof t&&t.length>0?`Function(${t})`:"Function"}if(Array.isArray(n)){const t=n.length;let e="[";t>0&&(e+=l(n[0]));for(let r=1;r<t;r++)e+=", "+l(n[r]);return e+="]",e}const e=/\[object ([^\]]+)\]/.exec(toString.call(n));let r;if(!(e.length>1))return toString.call(n);if(r=e[1],"Object"==r)try{return"Object("+JSON.stringify(n)+")"}catch(n){return"Object"}return n instanceof Error?`${n.name}: ${n.message}\n${n.stack}`:r}let b=0,h=new("undefined"==typeof TextEncoder?(0,n.require)("util").TextEncoder:TextEncoder)("utf-8");const w="function"==typeof h.encodeInto?function(n,t){return h.encodeInto(n,t)}:function(n,t){const e=h.encode(n);return t.set(e),{read:n.length,written:e.length}};let p=null;function y(){return 
null!==p&&0!==p.byteLength||(p=new Int32Array(r.memory.buffer)),p}const m="undefined"==typeof FinalizationRegistry?{register:()=>{},unregister:()=>{}}:new FinalizationRegistry((n=>r.__wbg_xpi_free(n>>>0)));class x{__destroy_into_raw(){const n=this.__wbg_ptr;return this.__wbg_ptr=0,m.unregister(this),n}free(){const n=this.__destroy_into_raw();r.__wbg_xpi_free(n)}constructor(n){const t=function(n,t){const e=t(1*n.length,1)>>>0;return u().set(n,e/1),b=n.length,e}(n,r.__wbindgen_malloc),e=b,i=r.xpi_new(t,e);return this.__wbg_ptr=i>>>0,this}to_js(){return c(r.xpi_to_js(this.__wbg_ptr))}get manifest(){return c(r.xpi_manifest(this.__wbg_ptr))}get has_manifest(){return 0!==r.xpi_has_manifest(this.__wbg_ptr)}get has_pkcs7_sig(){return 0!==r.xpi_has_pkcs7_sig(this.__wbg_ptr)}get env(){let n,t;try{const _=r.__wbindgen_add_to_stack_pointer(-16);r.xpi_env(_,this.__wbg_ptr);var e=y()[_/4+0],i=y()[_/4+1];return n=e,t=i,f(e,i)}finally{r.__wbindgen_add_to_stack_pointer(16),r.__wbindgen_free(n,t,1)}}get pkcs7_algorithm(){let n,t;try{const _=r.__wbindgen_add_to_stack_pointer(-16);r.xpi_pkcs7_algorithm(_,this.__wbg_ptr);var e=y()[_/4+0],i=y()[_/4+1];return n=e,t=i,f(e,i)}finally{r.__wbindgen_add_to_stack_pointer(16),r.__wbindgen_free(n,t,1)}}get kind(){let n,t;try{const _=r.__wbindgen_add_to_stack_pointer(-16);r.xpi_kind(_,this.__wbg_ptr);var e=y()[_/4+0],i=y()[_/4+1];return n=e,t=i,f(e,i)}finally{r.__wbindgen_add_to_stack_pointer(16),r.__wbindgen_free(n,t,1)}}get has_cose_sig(){return 0!==r.xpi_has_cose_sig(this.__wbg_ptr)}get cose_algorithm(){let n,t;try{const _=r.__wbindgen_add_to_stack_pointer(-16);r.xpi_cose_algorithm(_,this.__wbg_ptr);var e=y()[_/4+0],i=y()[_/4+1];return n=e,t=i,f(e,i)}finally{r.__wbindgen_add_to_stack_pointer(16),r.__wbindgen_free(n,t,1)}}get is_enterprise(){return 0!==r.xpi_is_enterprise(this.__wbg_ptr)}}function k(n){c(n)}function T(n){return a(n)}function $(n){return a(BigInt.asUintN(64,n))}function v(n){return a(o(n))}function C(n,t){return 
a(f(n,t))}function E(n,t,e){o(n)[c(t)]=c(e)}function I(n,t){return a(new Error(f(n,t)))}function O(){return a(new Array)}function j(){return a(new Object)}function S(n,t,e){o(n)[t>>>0]=c(e)}function A(n,t){const e=function(n,t,e){if(void 0===e){const e=h.encode(n),r=t(e.length,1)>>>0;return u().subarray(r,r+e.length).set(e),b=e.length,r}let r=n.length,i=t(r,1)>>>0;const _=u();let o=0;for(;o<r;o++){const t=n.charCodeAt(o);if(t>127)break;_[i+o]=t}if(o!==r){0!==o&&(n=n.slice(o)),i=e(i,r,r=o+3*n.length,1)>>>0;const t=u().subarray(i+o,i+r);o+=w(n,t).written,i=e(i,r,o,1)>>>0}return b=o,i}(l(o(t)),r.__wbindgen_malloc,r.__wbindgen_realloc),i=b;y()[n/4+1]=i,y()[n/4+0]=e}function F(n,t){throw new Error(f(n,t))}},10:(n,t,e)=>{e.a(n,(async(n,r)=>{try{e.r(t);var i=e(194),_=n([i]);i=(_.then?(await _)():_)[0];const o=n=>{const t=document.getElementById("output-pretty"),e=document.getElementById("output-raw");if(!n.has_manifest)return t.textContent="⚠️ This file doesn't look like an add-on.",void(e.textContent="");const{cose_algorithm:r,env:i,has_cose_sig:_,has_pkcs7_sig:o,is_enterprise:s,kind:c,manifest:a,pkcs7_algorithm:g}=n,d=o?`<strong>${c}</strong> add-on`:"add-on";t.innerHTML=`\n ✅ ${a.id?`This ${d} has the following ID in its manifest: <code>${a.id}</code>`:`This ${d} does not have an ID in its manifest`}. Its version is: <code>${a.version}</code>.\n <br>\n <br>\n ${o?`${_?"🔐":"🔓"} It has been signed with the <strong>${i}</strong> root certificate. ${_?"This add-on is dual-signed (PKCS#7 and COSE)":"This add-on is <strong>not</strong> signed with COSE"}. The PKCS#7 digest algorithm is: <strong>${g}</strong>. 
${_?`The COSE algorithm is: <strong>${r}</strong>.`:""}`:"❌ It doesn't appear to be signed."}\n `,s&&(t.innerHTML+="\n <br>\n <br>\n 👔 This is an <strong>enterprise</strong> add-on.\n "),e.textContent=JSON.stringify(n.to_js(),null,2)};document.getElementById("input-file").addEventListener("change",(n=>{const{files:t}=n.target,e=new FileReader;e.onload=function(n){const t=new i.bi(new Uint8Array(e.result));o(t)},e.readAsArrayBuffer(t[0])}),!1),r()}catch(n){r(n)}}))},238:(n,t,e)=>{var r=e(537);n.exports=e.v(t,n.id,"e97b6b9521bf5e52cfd2",{"./xpidump_bg.js":{__wbindgen_object_drop_ref:r.ug,__wbindgen_number_new:r.pT,__wbindgen_bigint_from_u64:r.Kx,__wbindgen_object_clone_ref:r.m_,__wbindgen_string_new:r.h4,__wbg_set_f975102236d3c502:r.Je,__wbindgen_error_new:r.hd,__wbg_new_16b304a2cfa7ff4a:r.G2,__wbg_new_72fb9a18b5ae2624:r.Rx,__wbg_set_d4638f722068f043:r.CF,__wbindgen_debug_string:r.fY,__wbindgen_throw:r.Or}})}}]); | willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
docs/bootstrap.js | JavaScript | (()=>{var e,r,t,o,n,a,i={},s={};function p(e){var r=s[e];if(void 0!==r)return r.exports;var t=s[e]={id:e,loaded:!1,exports:{}};return i[e](t,t.exports,p),t.loaded=!0,t.exports}p.m=i,e="function"==typeof Symbol?Symbol("webpack queues"):"__webpack_queues__",r="function"==typeof Symbol?Symbol("webpack exports"):"__webpack_exports__",t="function"==typeof Symbol?Symbol("webpack error"):"__webpack_error__",o=e=>{e&&!e.d&&(e.d=1,e.forEach((e=>e.r--)),e.forEach((e=>e.r--?e.r++:e())))},p.a=(n,a,i)=>{var s;i&&((s=[]).d=1);var p,c,u,l=new Set,d=n.exports,b=new Promise(((e,r)=>{u=r,c=e}));b[r]=d,b[e]=e=>(s&&e(s),l.forEach(e),b.catch((e=>{}))),n.exports=b,a((n=>{var a;p=(n=>n.map((n=>{if(null!==n&&"object"==typeof n){if(n[e])return n;if(n.then){var a=[];a.d=0,n.then((e=>{i[r]=e,o(a)}),(e=>{i[t]=e,o(a)}));var i={};return i[e]=e=>e(a),i}}var s={};return s[e]=e=>{},s[r]=n,s})))(n);var i=()=>p.map((e=>{if(e[t])throw e[t];return e[r]})),c=new Promise((r=>{(a=()=>r(i)).r=0;var t=e=>e!==s&&!l.has(e)&&(l.add(e),e&&!e.d&&(a.r++,e.push(a)));p.map((r=>r[e](t)))}));return a.r?c:i()}),(e=>(e?u(b[t]=e):c(d),o(s)))),s&&(s.d=0)},p.d=(e,r)=>{for(var t in r)p.o(r,t)&&!p.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:r[t]})},p.f={},p.e=e=>Promise.all(Object.keys(p.f).reduce(((r,t)=>(p.f[t](e,r),r)),[])),p.u=e=>e+".bootstrap.js",p.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),p.hmd=e=>((e=Object.create(e)).children||(e.children=[]),Object.defineProperty(e,"exports",{enumerable:!0,set:()=>{throw new Error("ES Modules may not assign module.exports or exports.*, Use ESM export syntax, instead: "+e.id)}}),e),p.o=(e,r)=>Object.prototype.hasOwnProperty.call(e,r),n={},a="xpidump-webapp:",p.l=(e,r,t,o)=>{if(n[e])n[e].push(r);else{var i,s;if(void 0!==t)for(var c=document.getElementsByTagName("script"),u=0;u<c.length;u++){var 
l=c[u];if(l.getAttribute("src")==e||l.getAttribute("data-webpack")==a+t){i=l;break}}i||(s=!0,(i=document.createElement("script")).charset="utf-8",i.timeout=120,p.nc&&i.setAttribute("nonce",p.nc),i.setAttribute("data-webpack",a+t),i.src=e),n[e]=[r];var d=(r,t)=>{i.onerror=i.onload=null,clearTimeout(b);var o=n[e];if(delete n[e],i.parentNode&&i.parentNode.removeChild(i),o&&o.forEach((e=>e(t))),r)return r(t)},b=setTimeout(d.bind(null,void 0,{type:"timeout",target:i}),12e4);i.onerror=d.bind(null,i.onerror),i.onload=d.bind(null,i.onload),s&&document.head.appendChild(i)}},p.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},p.v=(e,r,t,o)=>{var n=fetch(p.p+""+t+".module.wasm");return"function"==typeof WebAssembly.instantiateStreaming?WebAssembly.instantiateStreaming(n,o).then((r=>Object.assign(e,r.instance.exports))):n.then((e=>e.arrayBuffer())).then((e=>WebAssembly.instantiate(e,o))).then((r=>Object.assign(e,r.instance.exports)))},(()=>{var e;p.g.importScripts&&(e=p.g.location+"");var r=p.g.document;if(!e&&r&&(r.currentScript&&(e=r.currentScript.src),!e)){var t=r.getElementsByTagName("script");t.length&&(e=t[t.length-1].src)}if(!e)throw new Error("Automatic publicPath is not supported in this browser");e=e.replace(/#.*$/,"").replace(/\?.*$/,"").replace(/\/[^\/]+$/,"/"),p.p=e})(),(()=>{var e={179:0};p.f.j=(r,t)=>{var o=p.o(e,r)?e[r]:void 0;if(0!==o)if(o)t.push(o[2]);else{var n=new Promise(((t,n)=>o=e[r]=[t,n]));t.push(o[2]=n);var a=p.p+p.u(r),i=new Error;p.l(a,(t=>{if(p.o(e,r)&&(0!==(o=e[r])&&(e[r]=void 0),o)){var n=t&&("load"===t.type?"missing":t.type),a=t&&t.target&&t.target.src;i.message="Loading chunk "+r+" failed.\n("+n+": "+a+")",i.name="ChunkLoadError",i.type=n,i.request=a,o[1](i)}}),"chunk-"+r,r)}};var r=(r,t)=>{var o,n,[a,i,s]=t,c=0;if(a.some((r=>0!==e[r]))){for(o in 
i)p.o(i,o)&&(p.m[o]=i[o]);s&&s(p)}for(r&&r(t);c<a.length;c++)n=a[c],p.o(e,n)&&e[n]&&e[n][0](),e[n]=0},t=self.webpackChunkxpidump_webapp=self.webpackChunkxpidump_webapp||[];t.forEach(r.bind(null,0)),t.push=r.bind(null,t.push.bind(t))})(),p.e(10).then(p.bind(p,10)).catch((e=>console.error("Error importing `index.js`:",e)))})(); | willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
docs/index.html | HTML | <!doctype html><html lang="en"><head><title>xpidump</title><meta charset="utf-8"><meta name="viewport" content="width=device-width,initial-scale=1"/><link rel="stylesheet" href="https://unpkg.com/terminal.css@0.7.4/dist/terminal.min.css"/><style>pre, pre code {
background-color: var(--code-bg-color);
}
pre code {
overflow-x: initial;
}
table td {
line-break: anywhere;
}
footer p {
margin-top: 1em;
font-size: 0.8em;
text-align: center;
}</style><script defer="defer" src="bootstrap.js"></script></head><body class="terminal"><div class="container"><header class="header"><div class="logo"><a href="./">xpidump</a></div><hr></header><main><noscript><div class="terminal-alert terminal-alert-error">Please enable JavaScript on this page</div></noscript><div><p>This page allows you to dump information about an <em>XPI</em> file. No "server-side" is involved, everything is performed locally in the browser. Select a file in the "Input:" section and the information about it will be displayed under "Output:".</p></div><div class="input"><h2>Input:</h2><div class="form-group"><label for="input-file">Select a file:</label> <input id="input-file" type="file" accept=".xpi,.zip"></div></div><div class="output"><h2>Output:</h2><p id="output-pretty"></p><details><summary>raw output</summary><pre><code id="output-raw"></code></pre></details></div></main><footer><p>Made by <a href="http://mamot.fr/@willdurand">@willdurand</a> – The source code is available on <a href="https://github.com/willdurand/xpidump">GitHub</a> (MIT license) - <a href="https://github.com/willdurand/xpidump/commit/517ab89b4aa05ef63503e93061bbcd340aaa35f3">517ab89</a>.</p></footer></div><script src="./bootstrap.js"></script></body></html> | willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
src/lib.rs | Rust | //! A library to parse XPI files.
//!
//! # Example
//!
//! ```
//! use std::fs;
//! use xpidump::XPI;
//! use zip::ZipArchive;
//!
//! let mut archive = ZipArchive::new(
//! fs::File::open("tests/fixtures/dev-new.xpi").unwrap()
//! ).unwrap();
//! let xpi = XPI::new(&mut archive);
//!
//! println!("Add-on ID in the manifest: {}", xpi.manifest.id.unwrap());
//! // Add-on ID in the manifest: {c208c857-c691-4c69-bfa9-3c2b04f4a0ec}
//! ```
mod xpi;
pub use xpi::*;
#[cfg(target_arch = "wasm32")]
pub mod wasm_bindings;
| willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
src/main.rs | Rust | use clap::Parser;
use std::{fs::File, path::PathBuf};
use xpidump::XPI;
use zip::ZipArchive;
// Output formats supported via the `--format` CLI flag. Plain `//` comments
// are used on purpose: clap turns `///` doc comments into help text, which
// would change the program's `--help` output.
#[derive(clap::ValueEnum, Clone)]
enum Format {
    // Human-readable, multi-line text (the default).
    Text,
    // The full dump serialized as JSON.
    Json,
}
/// A simple tool to dump information about XPI files.
#[derive(Parser)]
#[command(version, about, long_about = None)]
struct Args {
    /// The path to an XPI file
    file: PathBuf,
    // Output format; see `Format`. (Plain `//` comment so clap's generated
    // help text stays unchanged.)
    #[clap(short, long, value_enum, default_value = "text")]
    format: Format,
}
/// CLI entry point: parses arguments, reads the XPI archive and prints its
/// dump in the requested format.
fn main() {
    let args = Args::parse();
    // Fail fast but include the underlying cause, so users can tell a
    // missing file apart from a corrupt archive (the previous panics
    // discarded the original error).
    let file = File::open(args.file)
        .unwrap_or_else(|err| panic!("error: failed to open XPI file: {}", err));
    let mut archive = ZipArchive::new(file)
        .unwrap_or_else(|err| panic!("error: failed to read XPI file: {}", err));
    let xpi = XPI::new(&mut archive);
    println!(
        "{}",
        match args.format {
            // Serializing our own types cannot fail here, hence the unwrap.
            Format::Json => serde_json::to_string(&xpi).unwrap(),
            Format::Text => xpi.to_string(),
        }
    );
}
| willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
src/wasm_bindings.rs | Rust | use crate::{Environment, SignatureKind, XPI as InnerXPI};
use std::io::Cursor;
use wasm_bindgen::prelude::*;
use zip::ZipArchive;
// This file contains a thin layer to expose the `xpidump` information in a WASM environment.
/// JS-facing wrapper around the crate's [`InnerXPI`] type; the getters in the
/// `impl` block below are exported to JavaScript through `wasm_bindgen`.
#[wasm_bindgen]
pub struct XPI {
    // The wrapped parser result.
    xpi: InnerXPI,
}
#[wasm_bindgen]
impl XPI {
    /// Parses the given raw XPI (ZIP) bytes.
    ///
    /// Note: panics (surfacing as a JS exception) when `data` is not a valid
    /// ZIP archive.
    #[wasm_bindgen(constructor)]
    pub fn new(data: Vec<u8>) -> XPI {
        let reader = Cursor::new(data);
        let mut zip_archive = ZipArchive::new(reader).unwrap();
        XPI {
            xpi: InnerXPI::new(&mut zip_archive),
        }
    }
    /// Returns the whole dump as a plain JS object.
    #[wasm_bindgen]
    pub fn to_js(&self) -> JsValue {
        serde_wasm_bindgen::to_value(&self.xpi).unwrap()
    }
    /// The parsed `manifest.json` information, as a JS object.
    #[wasm_bindgen(getter)]
    pub fn manifest(&self) -> JsValue {
        serde_wasm_bindgen::to_value(&self.xpi.manifest).unwrap()
    }
    /// Whether the archive contains a `manifest.json` file.
    #[wasm_bindgen(getter)]
    pub fn has_manifest(&self) -> bool {
        self.xpi.manifest.exists()
    }
    /// Whether the archive contains a PKCS#7 signature.
    #[wasm_bindgen(getter)]
    pub fn has_pkcs7_sig(&self) -> bool {
        self.xpi.signatures.pkcs7.exists()
    }
    /// The signing environment derived from the PKCS#7 certificates, as a
    /// lowercase string.
    #[wasm_bindgen(getter)]
    pub fn env(&self) -> String {
        match self.xpi.signatures.pkcs7.env() {
            Environment::Unknown => "unknown".to_string(),
            Environment::Development => "development".to_string(),
            Environment::Staging => "staging".to_string(),
            Environment::Production => "production".to_string(),
        }
    }
    /// The PKCS#7 digest algorithm name, or an empty string when unknown.
    #[wasm_bindgen(getter)]
    pub fn pkcs7_algorithm(&self) -> String {
        self.xpi
            .signatures
            .pkcs7
            .algorithm
            .as_deref()
            .unwrap_or("")
            .to_owned()
    }
    /// The kind of add-on ("regular", "privileged" or "system"), derived
    /// from the PKCS#7 certificate chain.
    #[wasm_bindgen(getter)]
    pub fn kind(&self) -> String {
        match self.xpi.signatures.pkcs7.kind() {
            SignatureKind::Regular => "regular".to_string(),
            SignatureKind::Privileged => "privileged".to_string(),
            SignatureKind::System => "system".to_string(),
        }
    }
    /// Whether the archive contains a COSE signature.
    #[wasm_bindgen(getter)]
    pub fn has_cose_sig(&self) -> bool {
        self.xpi.signatures.cose.exists()
    }
    /// The COSE signature algorithm name, or an empty string when unknown.
    #[wasm_bindgen(getter)]
    pub fn cose_algorithm(&self) -> String {
        self.xpi
            .signatures
            .cose
            .algorithm
            .as_deref()
            .unwrap_or("")
            .to_owned()
    }
    /// Whether this looks like a signed enterprise add-on.
    #[wasm_bindgen(getter)]
    pub fn is_enterprise(&self) -> bool {
        self.xpi.is_enterprise()
    }
}
| willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
src/xpi.rs | Rust | mod cose_ish;
mod manifest;
mod signatures;
use serde::{Deserialize, Serialize};
use std::{fmt, io};
use zip::ZipArchive;
pub use manifest::*;
pub use signatures::*;
#[derive(Debug, PartialEq, Deserialize, Serialize)]
/// Represents the recommendation state values.
///
/// The serde renames match the state strings found in the embedded
/// `mozilla-recommendation.json` file.
pub enum RecommendationState {
    #[serde(rename = "recommended")]
    /// The XPI is a recommended add-on.
    Recommended,
    #[serde(rename = "recommended-android")]
    /// The XPI is a recommended add-on for Firefox for Android.
    RecommendedAndroid,
    #[serde(rename = "line")]
    /// The XPI is a line add-on.
    Line,
    #[serde(rename = "verified")]
    /// The XPI is a verified add-on.
    Verified,
}
#[derive(Deserialize, Serialize)]
/// Represents the validity of the recommendation state.
pub struct Validity {
    /// Start of the validity period (kept verbatim from the JSON file —
    /// presumably an ISO-8601 date string, TODO confirm).
    pub not_before: String,
    /// End of the validity period (same format as `not_before`).
    pub not_after: String,
}
#[derive(Deserialize, Serialize)]
/// Represents the recommendation state of an XPI.
///
/// Mirrors the `mozilla-recommendation.json` file embedded in recommended
/// add-ons.
pub struct Recommendation {
    /// Version of the recommendation file schema.
    pub schema_version: u64,
    /// The add-on ID the recommendation applies to.
    pub addon_id: String,
    /// All recommendation states granted to the add-on.
    pub states: Vec<RecommendationState>,
    /// The period during which the recommendation is valid.
    pub validity: Validity,
}
#[derive(Serialize)]
/// Represents an XPI file.
///
/// XPI files are very similar to ZIP files and used to package add-ons for Firefox.
pub struct XPI {
    /// Information about the `manifest.json` file.
    pub manifest: Manifest,
    /// Information about the signatures found in the XPI.
    pub signatures: Signatures,
    /// The recommendation state found in the XPI file, if any (parsed from
    /// `mozilla-recommendation.json`).
    pub recommendation: Option<Recommendation>,
}
impl XPI {
    /// Constructs a new `XPI` from an instance of
    /// [`ZipArchive`](https://docs.rs/zip/latest/zip/read/struct.ZipArchive.html).
    pub fn new<R: io::Read + io::Seek>(archive: &mut ZipArchive<R>) -> XPI {
        // The recommendation file is optional, and it might fail to parse;
        // both cases simply result in `None`.
        let recommendation = archive
            .by_name("mozilla-recommendation.json")
            .ok()
            .and_then(|file| serde_json::from_reader::<_, Recommendation>(file).ok());
        XPI {
            manifest: Manifest::parse(archive),
            signatures: Signatures::parse(archive),
            recommendation,
        }
    }
    /// Whether the XPI is a _recommended_ add-on, i.e. it "looks" signed (i.e. it embeds
    /// signature files) and it has a recommendation state.
    pub fn is_recommended(&self) -> bool {
        self.recommendation.is_some() && self.signatures.has_signatures()
    }
    /// Whether the XPI is an _enterprise_ add-on, i.e. it "looks" signed (i.e. it embeds
    /// signature files) and it has the enterprise flag (property) set in the manifest.
    pub fn is_enterprise(&self) -> bool {
        self.manifest.has_enterprise_flag() && self.signatures.has_signatures()
    }
}
impl fmt::Display for XPI {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render the recommendation states, or "NONE" when there is no
        // recommendation file.
        let recommendation = self
            .recommendation
            .as_ref()
            .map_or_else(|| "NONE".to_owned(), |rec| format!("{:?}", rec.states));
        write!(
            f,
            "{}\n\nRECOMMENDATION:\n {}\n\n{}",
            self.manifest, recommendation, self.signatures,
        )
    }
}
| willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
src/xpi/cose_ish.rs | Rust | use cms::cert::x509::{der::Decode, Certificate};
use minicbor::data::Int;
use minicbor::decode::Decoder;
use std::convert::From;
// CBOR tag identifying a COSE_Sign structure (RFC 8152).
const COSE_SIGN_TAG: u64 = 98;
// COSE header parameter labels (RFC 8152): `alg` and `kid`.
const COSE_ALG: u64 = 1;
const COSE_KID: u64 = 4;
/// Errors returned while parsing a COSE signature.
pub enum CoseError {
    /// The top-level CBOR tag is not the expected COSE_Sign tag.
    InvalidTag,
    /// A CBOR item did not have the expected type.
    UnexpectedType,
    /// The overall structure did not match the expected layout.
    MalformedInput,
}
impl From<minicbor::decode::Error> for CoseError {
    // Any low-level CBOR decoding error surfaces as `UnexpectedType`; this
    // lets `CoseSign::parse` use the `?` operator on every `Decoder` call.
    fn from(_: minicbor::decode::Error) -> Self {
        CoseError::UnexpectedType
    }
}
/// The interesting parts of a COSE_Sign structure: the signature algorithm
/// and the certificates collected from its headers.
pub struct CoseSign {
    /// The signature algorithm name (e.g. "ES256"), or "UNKNOWN".
    pub algorithm: String,
    /// Intermediate certificates followed by end-entity certificate(s).
    pub certificates: Vec<Certificate>,
}
impl CoseSign {
    /// Parses a (Mozilla-flavored) COSE_Sign structure from raw CBOR bytes
    /// (the content of `META-INF/cose.sig`).
    ///
    /// The decoding is strictly sequential: each `decoder` call consumes the
    /// next CBOR item, so the order of operations below mirrors the exact
    /// layout of the structure.
    pub(crate) fn parse(bytes: &[u8]) -> Result<Self, CoseError> {
        let mut decoder = Decoder::new(bytes);
        if decoder.tag()?.as_u64() != COSE_SIGN_TAG {
            return Err(CoseError::InvalidTag);
        }
        // We expect an array with 4 entries:
        //
        // COSE_Sign = [
        //     protected : serialized_map,
        //     unprotected : header_map
        //     payload : nil,
        //     signatures : [COSE_Signature, ...]
        // ]
        //
        match decoder.array()? {
            Some(4) => {}
            _ => return Err(CoseError::MalformedInput),
        };
        // protected should contain the intermediate certificates.
        let mut certificates = vec![];
        let protected = decoder.bytes()?;
        // The protected header is itself a serialized CBOR map, hence the
        // nested decoder.
        let mut dec = Decoder::new(protected);
        // We expect a map with an array in it.
        match dec.map()? {
            Some(1) => {}
            _ => return Err(CoseError::MalformedInput),
        };
        if dec.int()? == Int::from(COSE_KID) {
            // Important: this is not RFC 8152 compliant because `kid` should be `bstr`, not an
            // `array`. See: https://github.com/franziskuskiefer/cose-rust/issues/60
            let size = match dec.array()? {
                Some(size) => size,
                None => return Err(CoseError::MalformedInput),
            };
            // Decode all the intermediate certificates; entries that fail to
            // parse as DER are silently skipped.
            for _ in 0..size {
                let data = dec.bytes()?;
                if let Ok(cert) = Certificate::from_der(data) {
                    certificates.push(cert);
                }
            }
        }
        // unprotected should be an empty map.
        match decoder.map()? {
            Some(0) => {}
            _ => return Err(CoseError::MalformedInput),
        };
        // payload should be null because this is a detached signature.
        decoder.null()?;
        // signatures: keep the algorithm of the last signature that declares
        // one, defaulting to "UNKNOWN".
        let mut algorithm: String = "UNKNOWN".to_owned();
        let size = match decoder.array()? {
            Some(size) => size,
            None => return Err(CoseError::MalformedInput),
        };
        for _ in 0..size {
            // COSE_Signature = [
            //     protected : serialized_map,
            //     unprotected : header_map
            //     signature : bstr
            // ]
            match decoder.array()? {
                Some(3) => {}
                _ => return Err(CoseError::MalformedInput),
            };
            let protected = decoder.bytes()?;
            let mut dec = Decoder::new(protected);
            // We expect a map with 2 entries: `alg` and `kid`.
            match dec.map()? {
                Some(2) => {}
                _ => return Err(CoseError::MalformedInput),
            };
            if dec.int()? == Int::from(COSE_ALG) {
                // Map the COSE algorithm identifiers (negative integers per
                // RFC 8152) to their names.
                let val = dec.int()?;
                algorithm = if val == Int::from(-7) {
                    "ES256".to_owned()
                } else if val == Int::from(-35) {
                    "ES384".to_owned()
                } else if val == Int::from(-36) {
                    "ES512".to_owned()
                } else {
                    algorithm
                };
            }
            if dec.int()? == Int::from(COSE_KID) {
                // Here, `kid` carries the end-entity certificate.
                let data = dec.bytes()?;
                if let Ok(cert) = Certificate::from_der(data) {
                    certificates.push(cert);
                }
            }
            // unprotected (skipped)
            decoder.map()?;
            // signature (skipped)
            decoder.bytes()?;
        }
        Ok(CoseSign {
            algorithm,
            certificates,
        })
    }
}
| willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
src/xpi/manifest.rs | Rust | use json_comments::StripComments;
use serde::Serialize;
use std::{fmt, io};
use zip::ZipArchive;
#[derive(Default, Serialize)]
/// Represents the information contained in the `manifest.json` file.
pub struct Manifest {
    // Whether a parseable `manifest.json` entry was found in the archive;
    // exposed through `exists()`.
    present: bool,
    /// The add-on ID found in the manifest, if any.
    pub id: Option<String>,
    /// The add-on version found in the manifest file, if any.
    pub version: Option<String>,
    /// The value of the `bss.gecko.admin_install_only` property found in the manifest file,
    /// if any.
    pub admin_install_only: Option<bool>,
}
impl Manifest {
    /// Parses the `manifest.json` entry of the archive, returning a default
    /// (empty, non-`present`) `Manifest` when the file is missing or is not
    /// valid JSON.
    pub(crate) fn parse<R: io::Read + io::Seek>(archive: &mut ZipArchive<R>) -> Manifest {
        match archive.by_name("manifest.json") {
            Ok(file) => {
                // `manifest.json` file may contain comments so we have to strip them first to get
                // a valid JSON document.
                let stripped = StripComments::new(file);
                match serde_json::from_reader::<_, serde_json::Value>(stripped) {
                    Ok(data) => {
                        let mut id = None;
                        let mut admin_install_only = None;
                        // `browser_specific_settings` is the current key;
                        // `applications` is its legacy alias.
                        if let Some(bss) = data
                            .get("browser_specific_settings")
                            .or(data.get("applications"))
                        {
                            if let Some(gecko) = bss.get("gecko") {
                                // Retrieve the add-on ID from the manifest.
                                id = gecko
                                    .get("id")
                                    .and_then(|value| value.as_str())
                                    .map(|s| s.to_owned());
                                // Look up the "enterprise" manifest prop.
                                admin_install_only = gecko
                                    .get("admin_install_only")
                                    .and_then(|value| value.as_bool());
                            }
                        }
                        Manifest {
                            present: true,
                            id,
                            version: data
                                .get("version")
                                .and_then(|value| value.as_str())
                                .map(|s| s.to_owned()),
                            admin_install_only,
                        }
                    }
                    Err(_) => Manifest::default(),
                }
            }
            Err(_) => Manifest::default(),
        }
    }
    /// Indicates whether the `manifest.json` file exists in the XPI.
    pub fn exists(&self) -> bool {
        self.present
    }
    /// Indicates whether the `manifest.json` file contains the property indicating that this is an
    /// enterprise add-on. The property must be set to `true`. Otherwise, this method will return
    /// `false`.
    pub fn has_enterprise_flag(&self) -> bool {
        self.admin_install_only.is_some_and(|value| value)
    }
}
impl fmt::Display for Manifest {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Compute the displayed values first, then emit them in one shot.
        let id = self.id.as_deref().unwrap_or("N/A");
        let version = self.version.as_deref().unwrap_or("N/A");
        let enterprise = if self.has_enterprise_flag() { "Yes" } else { "No" };
        write!(
            f,
            "MANIFEST:\n ID : {}\n Version : {}\n Enterprise: {}",
            id, version, enterprise,
        )
    }
}
| willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
src/xpi/signatures.rs | Rust | use super::cose_ish::CoseSign;
use cms::cert::{
x509,
x509::{
attr::AttributeTypeAndValue,
certificate::TbsCertificateInner,
der::{
asn1::{PrintableStringRef, TeletexStringRef, UtcTime, Utf8StringRef},
Decode, Encode, Tag, Tagged,
},
Certificate,
},
CertificateChoices,
};
use cms::content_info::ContentInfo;
use cms::signed_data::SignedData;
use const_oid::db::{
rfc4519::{COMMON_NAME, ORGANIZATIONAL_UNIT_NAME},
rfc5912::{ID_SHA_1, ID_SHA_256},
};
use serde::{Serialize, Serializer};
use std::convert::{From, TryInto};
use std::{fmt, io, io::Read, time::Duration};
use zip::ZipArchive;
#[derive(Debug, PartialEq)]
/// Represents a date in a certificate.
///
/// Newtype around the x509 `Time` type, mainly so that `Serialize` and
/// `Display` can be implemented for it.
pub struct Date(x509::time::Time);
impl Date {
    /// Builds a `Date` from a duration since the Unix epoch.
    ///
    /// Panics (via `expect`) when the duration cannot be represented as an
    /// ASN.1 `UTCTime`.
    pub fn utc_time_from_duration(duration: Duration) -> Self {
        Date(x509::time::Time::from(
            UtcTime::from_unix_duration(duration).expect("failed to make UtcTime"),
        ))
    }
}
impl Default for Date {
fn default() -> Self {
Date::utc_time_from_duration(Duration::ZERO)
}
}
impl Serialize for Date {
    // Serialize as the human-readable `Display` form of the inner time.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(&self.0.to_string())
    }
}
impl fmt::Display for Date {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
#[derive(Debug, PartialEq, Default)]
/// The signing environment, derived from the signing certificates (see
/// `CertificateInfo::env`).
pub enum Environment {
    /// No signature/certificate to derive an environment from.
    #[default]
    Unknown,
    Development,
    Staging,
    Production,
}
impl fmt::Display for Environment {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let label = match self {
            Environment::Unknown => "UNKNOWN",
            Environment::Development => "DEVELOPMENT",
            Environment::Staging => "STAGING",
            Environment::Production => "PRODUCTION",
        };
        f.write_str(label)
    }
}
#[derive(Default, Serialize)]
/// Represents some of the information found in a certificate.
pub struct CertificateInfo {
    /// The subject's Common Name (CN), or "N/A" when absent.
    pub common_name: String,
    /// The subject's Organizational Unit (OU), or "N/A" when absent.
    pub organizational_unit: String,
    /// The certificate's `notAfter` (expiration) date.
    pub end_date: Date,
}
impl CertificateInfo {
    // Heuristically maps the certificate CN to a signing environment. Note
    // that this never yields `Environment::Unknown`: anything that is not
    // clearly dev or staging is assumed to be production.
    fn env(&self) -> Environment {
        if self.common_name.contains("dev.amo.root.ca") {
            Environment::Development
        } else if self.common_name.contains("staging") {
            Environment::Staging
        } else {
            Environment::Production
        }
    }
}
// NOTE(review): implementing `TryFrom` would be more idiomatic (it provides
// `TryInto` for free), but this suffices for the internal use below.
impl TryInto<CertificateInfo> for &CertificateChoices {
    type Error = ();
    fn try_into(self) -> Result<CertificateInfo, Self::Error> {
        // Only the `Certificate` choice carries a TBS certificate we can
        // extract information from; other choices are rejected.
        match self {
            CertificateChoices::Certificate(cert) => Ok((&cert.tbs_certificate).into()),
            _ => Err(()),
        }
    }
}
impl From<&Certificate> for CertificateInfo {
fn from(value: &Certificate) -> Self {
(&value.tbs_certificate).into()
}
}
impl From<&TbsCertificateInner> for CertificateInfo {
    // Extracts the subject CN/OU and the expiration date from a TBS
    // certificate; attributes that are absent are reported as "N/A".
    fn from(tbs_cert: &TbsCertificateInner) -> Self {
        let mut common_name = "N/A".to_string();
        let mut organizational_unit = "N/A".to_string();
        // Iterate the subject RDNs in reverse: when an attribute appears
        // more than once, the value closest to the start of the subject is
        // assigned last and therefore wins.
        for rdn in tbs_cert.subject.0.iter().rev() {
            // Only the first attribute of each (usually single-valued) RDN
            // is considered.
            if let Some(atv) = rdn.0.get(0) {
                match atv.oid {
                    COMMON_NAME => {
                        common_name = atv_to_string(atv);
                    }
                    ORGANIZATIONAL_UNIT_NAME => {
                        organizational_unit = atv_to_string(atv);
                    }
                    _ => {}
                };
            }
        }
        CertificateInfo {
            common_name,
            organizational_unit,
            end_date: Date(tbs_cert.validity.not_after),
        }
    }
}
impl fmt::Display for CertificateInfo {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The trailing `\` continuations keep the literal on one logical
        // string while formatting the source across multiple lines.
        write!(
            f,
            "Common Name (CN): {}\n \
            Organizational Unit (OU): {}\n \
            End Date : {}",
            self.common_name, self.organizational_unit, self.end_date
        )
    }
}
#[derive(Debug, PartialEq)]
/// Represents the kind of signature found in the XPI.
///
/// Derived from the Organizational Unit (OU) of the signing certificates,
/// see `Signature::kind`.
pub enum SignatureKind {
    /// The XPI has been signed as a regular add-on.
    Regular,
    /// The XPI has been signed as a privileged add-on.
    Privileged,
    /// The XPI has been signed as a system add-on.
    System,
}
impl fmt::Display for SignatureKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let label = match self {
            SignatureKind::Regular => "REGULAR ADD-ON",
            SignatureKind::Privileged => "PRIVILEGED ADD-ON",
            SignatureKind::System => "SYSTEM ADD-ON",
        };
        f.write_str(label)
    }
}
#[derive(Default, Serialize)]
/// Represents a signature found in an [`XPI`](`crate::XPI`).
pub struct Signature {
    // Whether the signature files were found in the archive; exposed
    // through `exists()`.
    present: bool,
    /// The digest/signature algorithm name, when it could be determined.
    pub algorithm: Option<String>,
    /// The certificates found in the signature (in the order produced by
    /// the parsers).
    pub certificates: Vec<CertificateInfo>,
}
impl Signature {
    /// Whether the signature files were found in the archive.
    pub fn exists(&self) -> bool {
        self.present
    }

    /// The signing environment, derived from the first certificate.
    ///
    /// Returns `Environment::Unknown` when the signature is absent, or when
    /// the signature files exist but no certificate could be parsed. The
    /// previous implementation indexed `certificates[0]` and panicked in
    /// that second case (which happens on the `parse_pkcs7`/`parse_cose`
    /// fallback paths: `present` is true but `certificates` is empty).
    pub fn env(&self) -> Environment {
        if self.exists() {
            self.certificates
                .first()
                .map_or(Environment::Unknown, |cert| cert.env())
        } else {
            Environment::Unknown
        }
    }

    /// The kind of add-on this signature was issued for, based on the
    /// Organizational Unit (OU) of the certificates.
    pub fn kind(&self) -> SignatureKind {
        if self
            .certificates
            .iter()
            .any(|cert| cert.organizational_unit == "Mozilla Extensions")
        {
            SignatureKind::Privileged
        } else if self
            .certificates
            .iter()
            .any(|cert| cert.organizational_unit == "Mozilla Components")
        {
            SignatureKind::System
        } else {
            SignatureKind::Regular
        }
    }
}
impl fmt::Display for Signature {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Summary line: presence / environment / algorithm / kind.
        write!(
            f,
            " └── {} / {} / {} / {}\n └── Certificates:",
            if self.present { "PRESENT" } else { "ABSENT" },
            self.env(),
            self.algorithm.as_deref().unwrap_or("N/A"),
            self.kind(),
        )?;
        // One line per certificate, in stored order.
        for cert in &self.certificates {
            write!(f, "\n └── {}", cert)?;
        }
        Ok(())
    }
}
impl From<CoseSign> for Signature {
    // Builds a `Signature` from a parsed COSE_Sign structure.
    fn from(cs: CoseSign) -> Self {
        let certificates = cs
            .certificates
            .into_iter()
            .map(|cert: Certificate| (&cert).into())
            .collect();
        Signature {
            present: true,
            algorithm: Some(cs.algorithm),
            certificates,
        }
    }
}
impl From<SignedData> for Signature {
    /// Builds a `Signature` from a PKCS#7 `SignedData` structure.
    fn from(data: SignedData) -> Self {
        // Determine the digest algorithm from the first signer info. A
        // well-formed SignedData always has at least one, but malformed
        // input must not panic — this resolves the previous
        // `get(0).unwrap()` (and its `TODO: avoid unwrap()`).
        let digest_algorithm = data
            .signer_infos
            .0
            .get(0)
            .map_or("unknown", |signer| match signer.digest_alg.oid {
                ID_SHA_1 => "SHA-1",
                ID_SHA_256 => "SHA-256",
                _ => "unknown",
            });
        Signature {
            present: true,
            algorithm: Some(digest_algorithm.to_string()),
            // Extract certificate info, skipping certificate choices that
            // are not plain certificates.
            certificates: data
                .certificates
                .map_or(vec![], |certs| certs.0.into_vec())
                .iter()
                .rev()
                .flat_map(|choice| choice.try_into())
                .collect(),
        }
    }
}
#[derive(Serialize)]
/// Represents the set of signatures possibly found in an [`XPI`](`crate::XPI`) file.
pub struct Signatures {
    /// A PKCS#7 signature (`META-INF/mozilla.rsa` and friends).
    pub pkcs7: Signature,
    /// A COSEish signature (`META-INF/cose.sig` and friends).
    pub cose: Signature,
}
impl Signatures {
    /// Whether there is at least one signature found in the [`XPI`](`crate::XPI`) file.
    pub fn has_signatures(&self) -> bool {
        self.pkcs7.exists() || self.cose.exists()
    }
    /// Parses both the PKCS#7 and COSE signature files of the archive.
    pub(crate) fn parse<R: io::Read + io::Seek>(archive: &mut ZipArchive<R>) -> Signatures {
        Signatures {
            pkcs7: Signatures::parse_pkcs7(archive),
            cose: Signatures::parse_cose(archive),
        }
    }
    // Parses the PKCS#7 signature: `META-INF/mozilla.rsa` holds a DER
    // `ContentInfo` wrapping a `SignedData` structure.
    fn parse_pkcs7<R: io::Read + io::Seek>(archive: &mut ZipArchive<R>) -> Signature {
        let has_pkcs7_manifest = archive.by_name("META-INF/manifest.mf").is_ok();
        let has_pkcs7_mozilla = archive.by_name("META-INF/mozilla.sf").is_ok();
        let maybe_sig_file = archive.by_name("META-INF/mozilla.rsa");
        // All three files must exist for the signature to count as present.
        let has_pkcs7 = has_pkcs7_manifest && has_pkcs7_mozilla && maybe_sig_file.is_ok();
        if let Ok(mut sig_file) = maybe_sig_file {
            let mut buffer = Vec::new();
            if sig_file.read_to_end(&mut buffer).is_ok() {
                if let Ok(data) = ContentInfo::from_der(&buffer)
                    .and_then(|ci| ci.content.to_der())
                    .and_then(|der| SignedData::from_der(&der))
                {
                    return data.into();
                }
            }
        }
        // Fallback: the signature data could not be parsed, so only report
        // presence (based on the file listing) without any details.
        Signature {
            present: has_pkcs7,
            ..Signature::default()
        }
    }
    // Parses the COSE signature from `META-INF/cose.sig`, when present.
    fn parse_cose<R: io::Read + io::Seek>(archive: &mut ZipArchive<R>) -> Signature {
        let has_cose_manifest = archive.by_name("META-INF/cose.manifest").is_ok();
        let maybe_sig_file = archive.by_name("META-INF/cose.sig");
        let has_cose = has_cose_manifest && maybe_sig_file.is_ok();
        if let Ok(mut sig_file) = maybe_sig_file {
            let mut buffer = Vec::new();
            if sig_file.read_to_end(&mut buffer).is_ok() {
                if let Ok(cs) = CoseSign::parse(&buffer) {
                    return cs.into();
                }
            }
        }
        // Same fallback as for PKCS#7: presence without details.
        Signature {
            present: has_cose,
            ..Signature::default()
        }
    }
}
impl fmt::Display for Signatures {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"SIGNATURES:\n PKCS7:\n{}\n COSE:\n{}",
self.pkcs7, self.cose
)
}
}
/// Renders a single RDN attribute value (e.g. a certificate CN or OU) as a
/// string.
///
/// Handles the ASN.1 string types seen in practice (PrintableString,
/// UTF8String and the legacy TeletexString). Any other tag — or a value
/// whose content fails to parse for its declared tag — renders as "???"
/// instead of panicking (previously each branch used `unwrap()`).
fn atv_to_string(atv: &AttributeTypeAndValue) -> String {
    let parsed = match atv.value.tag() {
        Tag::PrintableString => PrintableStringRef::try_from(&atv.value)
            .ok()
            .map(|s| s.as_str().to_owned()),
        Tag::Utf8String => Utf8StringRef::try_from(&atv.value)
            .ok()
            .map(|s| s.as_str().to_owned()),
        Tag::TeletexString => TeletexStringRef::try_from(&atv.value)
            .ok()
            .map(|s| s.as_str().to_owned()),
        _ => None,
    };
    parsed.unwrap_or_else(|| "???".to_string())
}
| willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
tests/xpi_test.rs | Rust | use std::io::Cursor;
use std::time::Duration;
use xpidump::{Date, Environment, RecommendationState, Signature, SignatureKind, XPI};
use zip::ZipArchive;
// Shared assertions for a parsed signature: it must exist and match the
// expected kind, environment and digest/signing algorithm. The intermediate
// certificate's OU is fixed per signature kind, so it is checked too.
fn assert_signature(signature: &Signature, kind: SignatureKind, env: Environment, algorithm: &str) {
    assert!(signature.exists());
    assert_eq!(kind, signature.kind());
    assert_eq!(env, signature.env());
    assert_eq!(
        algorithm,
        signature.algorithm.as_ref().expect("expect algorithm")
    );
    let expected_ou = match kind {
        SignatureKind::Privileged => "Mozilla Extensions",
        SignatureKind::Regular => "Production",
        SignatureKind::System => "Mozilla Components",
    };
    // The end certificate should contain a deterministic OU.
    assert_eq!(expected_ou, signature.certificates[1].organizational_unit)
}
// A current production add-on: dual-signed (PKCS#7/SHA-1 + COSE/ES256),
// GUID-style add-on ID, with known end-entity certificate expiry dates.
#[test]
fn test_prod_regular_addon() {
    let bytes = include_bytes!("fixtures/amo_info-1.25.0.xpi");
    let reader = Cursor::new(bytes);
    let mut archive = ZipArchive::new(reader).unwrap();
    let xpi = XPI::new(&mut archive);
    assert!(xpi.manifest.exists());
    assert!(!xpi.is_recommended());
    assert!(!xpi.is_enterprise());
    assert_eq!(
        "{db55bb9b-0d9f-407f-9b65-da9dd29c8d32}",
        xpi.manifest.id.expect("expect add-on ID")
    );
    assert_eq!(
        "1.25.0",
        xpi.manifest.version.expect("expect add-on version")
    );
    assert_signature(
        &xpi.signatures.pkcs7,
        SignatureKind::Regular,
        Environment::Production,
        "SHA-1",
    );
    assert_eq!(
        Date::utc_time_from_duration(Duration::from_secs(1743724800)),
        xpi.signatures.pkcs7.certificates[0].end_date
    );
    assert_signature(
        &xpi.signatures.cose,
        SignatureKind::Regular,
        Environment::Production,
        "ES256",
    );
    assert_eq!(
        Date::utc_time_from_duration(Duration::from_secs(1743724800)),
        xpi.signatures.cose.certificates[0].end_date
    );
}
// A legacy production add-on: no ID in the manifest, PKCS#7 only (no COSE),
// and certificate fields encoded as TeletexString.
#[test]
fn test_prod_old_regular_addon() {
    let bytes = include_bytes!("fixtures/colorzilla-3.3.xpi");
    let reader = Cursor::new(bytes);
    let mut archive = ZipArchive::new(reader).unwrap();
    let xpi = XPI::new(&mut archive);
    assert!(xpi.manifest.exists());
    assert!(xpi.manifest.id.is_none());
    assert!(!xpi.is_recommended());
    assert!(!xpi.is_enterprise());
    assert_eq!("3.3", xpi.manifest.version.expect("expect add-on version"));
    assert_signature(
        &xpi.signatures.pkcs7,
        SignatureKind::Regular,
        Environment::Production,
        "SHA-1",
    );
    // Verify TeletexString values.
    assert_eq!(
        "{6AC85730-7D0F-4de0-B3FA-21142DD85326}",
        xpi.signatures.pkcs7.certificates[1].common_name
    );
    assert_eq!(
        Date::utc_time_from_duration(Duration::from_secs(1741996362)),
        xpi.signatures.pkcs7.certificates[0].end_date
    );
    assert!(!xpi.signatures.cose.exists());
}
// A privileged production add-on ("Mozilla Extensions" OU), dual-signed
// with PKCS#7/SHA-256 and COSE/ES256.
#[test]
fn test_prod_privileged_addon() {
    let bytes = include_bytes!("fixtures/remote-settings-devtools.xpi");
    let reader = Cursor::new(bytes);
    let mut archive = ZipArchive::new(reader).unwrap();
    let xpi = XPI::new(&mut archive);
    assert!(xpi.manifest.exists());
    assert!(!xpi.is_recommended());
    assert!(!xpi.is_enterprise());
    assert_eq!(
        "remote-settings-devtools@mozilla.com",
        xpi.manifest.id.expect("expect add-on ID")
    );
    assert_eq!(
        "1.8.1buildid20230725.150941",
        xpi.manifest.version.expect("expect add-on version")
    );
    assert_signature(
        &xpi.signatures.pkcs7,
        SignatureKind::Privileged,
        Environment::Production,
        "SHA-256",
    );
    assert_signature(
        &xpi.signatures.cose,
        SignatureKind::Privileged,
        Environment::Production,
        "ES256",
    );
}
#[test]
fn test_staging_regular_addon() {
let bytes = include_bytes!("fixtures/dev-new.xpi");
let reader = Cursor::new(bytes);
let mut archive = ZipArchive::new(reader).unwrap();
let xpi = XPI::new(&mut archive);
assert!(xpi.manifest.exists());
assert!(!xpi.is_recommended());
assert!(!xpi.is_enterprise());
assert_eq!(
"{c208c857-c691-4c69-bfa9-3c2b04f4a0ec}",
xpi.manifest.id.expect("expect add-on ID")
);
assert_eq!("16.0", xpi.manifest.version.expect("expect add-on version"));
assert_signature(
&xpi.signatures.pkcs7,
SignatureKind::Regular,
Environment::Staging,
"SHA-1",
);
assert_signature(
&xpi.signatures.cose,
SignatureKind::Regular,
Environment::Staging,
"ES256",
);
}
// An older staging add-on carrying a recommendation file with two states
// (desktop + Android).
#[test]
fn test_staging_old_recommended_addon() {
    let bytes = include_bytes!("fixtures/dev-old-recommended.xpi");
    let reader = Cursor::new(bytes);
    let mut archive = ZipArchive::new(reader).unwrap();
    let xpi = XPI::new(&mut archive);
    assert!(xpi.manifest.exists());
    assert!(xpi.is_recommended());
    assert!(!xpi.is_enterprise());
    assert_eq!(
        vec![
            RecommendationState::Recommended,
            RecommendationState::RecommendedAndroid
        ],
        xpi.recommendation.unwrap().states
    );
    assert_eq!("alex3@mail.com", xpi.manifest.id.expect("expect add-on ID"));
    assert_eq!("1.1", xpi.manifest.version.expect("expect add-on version"));
    assert_signature(
        &xpi.signatures.pkcs7,
        SignatureKind::Regular,
        Environment::Staging,
        "SHA-1",
    );
    assert!(xpi.signatures.cose.exists());
}
// A system add-on ("Mozilla Components" OU) signed in staging with
// PKCS#7/SHA-256 and COSE/ES256.
#[test]
fn test_staging_system_addon() {
    let bytes = include_bytes!("fixtures/webcompat.xpi");
    let reader = Cursor::new(bytes);
    let mut archive = ZipArchive::new(reader).unwrap();
    let xpi = XPI::new(&mut archive);
    assert!(xpi.manifest.exists());
    assert!(!xpi.is_recommended());
    assert!(!xpi.is_enterprise());
    assert_eq!(
        "webcompat@mozilla.org",
        xpi.manifest.id.expect("expect add-on ID")
    );
    assert_eq!(
        "125.1.0buildid20240321.174451",
        xpi.manifest.version.expect("expect add-on version")
    );
    assert_signature(
        &xpi.signatures.pkcs7,
        SignatureKind::System,
        Environment::Staging,
        "SHA-256",
    );
    assert_signature(
        &xpi.signatures.cose,
        SignatureKind::System,
        Environment::Staging,
        "ES256",
    );
}
// An add-on whose ID exceeds 64 characters: the certificate CN holds the
// SHA-256 hash of the ID rather than the ID itself.
#[test]
fn test_long_id() {
    let bytes = include_bytes!("fixtures/laboratory_by_mozilla-3.0.8.xpi");
    let reader = Cursor::new(bytes);
    let mut archive = ZipArchive::new(reader).unwrap();
    let xpi = XPI::new(&mut archive);
    assert!(xpi.manifest.exists());
    assert!(xpi.is_recommended());
    assert!(!xpi.is_enterprise());
    assert_eq!(
        vec![RecommendationState::Line],
        xpi.recommendation.unwrap().states
    );
    assert_eq!(
        "1b2383b324c8520974ee097e46301d5ca4e076de387c02886f1c6b1503671586@pokeinthe.io",
        xpi.manifest.id.expect("expect add-on ID")
    );
    // AMO will pass the SHA-256 hash of an add-on ID to Autograph when its length is > 64 chars.
    assert_eq!(
        "237aafe39e41ad97721ba6b7d41ca597d0b9d67c54da10c079c3bb7ffc1853b3",
        xpi.signatures.pkcs7.certificates[1].common_name
    );
    assert_eq!(
        "237aafe39e41ad97721ba6b7d41ca597d0b9d67c54da10c079c3bb7ffc1853b3",
        xpi.signatures.cose.certificates[1].common_name
    );
}
// An unsigned ZIP with a valid manifest: parseable but with no signatures.
#[test]
fn test_unsigned_addon() {
    let bytes = include_bytes!("fixtures/unsigned.zip");
    let reader = Cursor::new(bytes);
    let mut archive = ZipArchive::new(reader).unwrap();
    let xpi = XPI::new(&mut archive);
    assert!(xpi.manifest.exists());
    assert!(xpi.manifest.id.is_none());
    assert_eq!(
        "1.0",
        xpi.manifest
            .version
            .as_ref()
            .expect("expect add-on version")
    );
    assert!(!xpi.is_recommended());
    assert!(!xpi.is_enterprise());
    assert!(!xpi.signatures.has_signatures());
}
// A "line" (Mozilla-built) extension signed in staging; checks both the
// end-entity and intermediate certificate expiry dates for both signatures.
#[test]
fn test_staging_line_extension() {
    let bytes = include_bytes!("fixtures/line-staging-cas-cur.xpi");
    let reader = Cursor::new(bytes);
    let mut archive = ZipArchive::new(reader).unwrap();
    let xpi = XPI::new(&mut archive);
    assert!(xpi.manifest.exists());
    assert!(xpi.is_recommended());
    assert!(!xpi.is_enterprise());
    assert_eq!(
        "{0cdc308b-4c2a-497d-916a-164d602ed358}",
        xpi.manifest.id.expect("expect add-on ID")
    );
    assert_eq!(
        "109.2",
        xpi.manifest.version.expect("expect add-on version")
    );
    assert_signature(
        &xpi.signatures.pkcs7,
        SignatureKind::Regular,
        Environment::Staging,
        "SHA-1",
    );
    assert_eq!(
        Date::utc_time_from_duration(Duration::from_secs(1741910400)),
        xpi.signatures.pkcs7.certificates[0].end_date
    );
    assert_eq!(
        Date::utc_time_from_duration(Duration::from_secs(2026818980)),
        xpi.signatures.pkcs7.certificates[1].end_date
    );
    assert_signature(
        &xpi.signatures.cose,
        SignatureKind::Regular,
        Environment::Staging,
        "ES256",
    );
    assert_eq!(
        Date::utc_time_from_duration(Duration::from_secs(1741910400)),
        xpi.signatures.cose.certificates[0].end_date
    );
    assert_eq!(
        Date::utc_time_from_duration(Duration::from_secs(2026818980)),
        xpi.signatures.cose.certificates[1].end_date
    );
}
// An add-on signed by a local AMO development stack: detected as the
// Development environment.
#[test]
fn test_amo_localdev() {
    let bytes = include_bytes!("fixtures/amo-localdev.xpi");
    let reader = Cursor::new(bytes);
    let mut archive = ZipArchive::new(reader).unwrap();
    let xpi = XPI::new(&mut archive);
    assert!(xpi.manifest.exists());
    assert!(!xpi.is_recommended());
    assert!(!xpi.is_enterprise());
    assert_eq!(
        "a-test-extension@will.drnd.me",
        xpi.manifest.id.expect("expect add-on ID")
    );
    assert_signature(
        &xpi.signatures.pkcs7,
        SignatureKind::Regular,
        Environment::Development,
        "SHA-1",
    );
    assert_signature(
        &xpi.signatures.cose,
        SignatureKind::Regular,
        Environment::Development,
        "ES256",
    );
}
// An enterprise-signed add-on is flagged by `is_enterprise()`.
#[test]
fn test_enterprise() {
    let bytes = include_bytes!("fixtures/enterprise-dev.xpi");
    let reader = Cursor::new(bytes);
    let mut archive = ZipArchive::new(reader).unwrap();
    let xpi = XPI::new(&mut archive);
    assert!(xpi.manifest.exists());
    assert!(xpi.is_enterprise());
}
| willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
web/bootstrap.js | JavaScript | // A dependency graph that contains any wasm must all be imported
// asynchronously. This `bootstrap.js` file does the single async import, so
// that no one else needs to worry about it again.
// Single async entry point: load the real app (and its wasm dependency
// graph) and surface any load failure in the console.
import("./index.js").catch((err) =>
  console.error("Error importing `index.js`:", err),
);
| willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
web/index.html | HTML | <!DOCTYPE html>
<html lang="en">
<head>
<title>xpidump</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1" />
<link rel="stylesheet" href="https://unpkg.com/terminal.css@0.7.4/dist/terminal.min.css" />
<style>
pre, pre code {
background-color: var(--code-bg-color);
}
pre code {
overflow-x: initial;
}
table td {
line-break: anywhere;
}
footer p {
margin-top: 1em;
font-size: 0.8em;
text-align: center;
}
</style>
</head>
<body class="terminal">
<div class="container">
<header class="header">
<div class="logo">
<a href="./">xpidump</a>
</div>
<hr>
</header>
<main>
<noscript>
<div class="terminal-alert terminal-alert-error">
Please enable JavaScript on this page
</div>
</noscript>
<div>
<p>
This page allows you to dump information about an <em>XPI</em>
file. No "server-side" is involved, everything is performed locally
in the browser. Select a file in the "Input:" section and the
information about it will be displayed under "Output:".
</p>
</div>
<div class="input">
<h2>Input:</h2>
<div class="form-group">
<label for="input-file">Select a file:</label>
<input id="input-file" type="file" accept=".xpi,.zip">
</div>
</div>
<div class="output">
<h2>Output:</h2>
<p id="output-pretty"></p>
<details>
<summary>raw output</summary>
<pre><code id="output-raw"></code></pre>
</details>
</div>
</main>
<footer>
<p>
Made by <a href="http://mamot.fr/@willdurand">@willdurand</a> – The source code is available on
<a href="https://github.com/willdurand/xpidump">GitHub</a> (MIT license) –
<a href="https://github.com/willdurand/xpidump/commit/<%= git_hash %>"><%= short_git_hash %></a>.
</p>
</footer>
</div>
<script src="./bootstrap.js"></script>
</body>
</html>
| willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
web/index.js | JavaScript | import * as xpidump from "xpidump";
const updateUI = (xpi) => {
const $outputPretty = document.getElementById("output-pretty");
const $outputRaw = document.getElementById("output-raw");
if (!xpi.has_manifest) {
$outputPretty.textContent = `⚠️ This file doesn't look like an add-on.`;
$outputRaw.textContent = "";
return;
}
const {
cose_algorithm,
env,
has_cose_sig,
has_pkcs7_sig,
is_enterprise,
kind,
manifest,
pkcs7_algorithm,
} = xpi;
// We don't know what kind of add-on we are looking at when it is not signed.
const prettyKind = has_pkcs7_sig
? `<strong>${kind}</strong> add-on`
: "add-on";
$outputPretty.innerHTML = `
✅ ${manifest.id ? `This ${prettyKind} has the following ID in its manifest: <code>${manifest.id}</code>` : `This ${prettyKind} does not have an ID in its manifest`}. Its version is: <code>${manifest.version}</code>.
<br>
<br>
${has_pkcs7_sig ? `${has_cose_sig ? "🔐" : "🔓"} It has been signed with the <strong>${env}</strong> root certificate. ${has_cose_sig ? "This add-on is dual-signed (PKCS#7 and COSE)" : "This add-on is <strong>not</strong> signed with COSE"}. The PKCS#7 digest algorithm is: <strong>${pkcs7_algorithm}</strong>. ${has_cose_sig ? `The COSE algorithm is: <strong>${cose_algorithm}</strong>.` : ""}` : `❌ It doesn't appear to be signed.`}
`;
if (is_enterprise) {
$outputPretty.innerHTML += `
<br>
<br>
👔 This is an <strong>enterprise</strong> add-on.
`;
}
$outputRaw.textContent = JSON.stringify(xpi.to_js(), null, 2);
};
// Parse the selected file locally (no server involved) and render the
// result. Guards against an empty selection (e.g. the user cancelled the
// file picker), resolving the previous TODOs.
document.getElementById("input-file").addEventListener(
  "change",
  (event) => {
    const { files } = event.target
    if (!files || files.length === 0) {
      return
    }
    const reader = new FileReader()
    reader.onload = () => {
      // The wasm API expects the raw bytes of the XPI/ZIP file.
      const xpi = new xpidump.XPI(new Uint8Array(reader.result))
      updateUI(xpi)
    }
    reader.readAsArrayBuffer(files[0])
  },
  false,
)
| willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
web/webpack.config.js | JavaScript | const HtmlWebpackPlugin = require("html-webpack-plugin");
const path = require("path");
// Embed the current commit hash at build time so the footer can link to it.
const git_hash = require("child_process")
  .execSync("git rev-parse HEAD")
  .toString()
  .trim();
module.exports = {
  entry: "./bootstrap.js",
  output: {
    // The site is built into `docs/` (served by GitHub Pages).
    path: path.resolve(__dirname, "..", "docs"),
    filename: "bootstrap.js",
  },
  mode: process.env.NODE_ENV || "development",
  plugins: [
    new HtmlWebpackPlugin({
      template: "index.html",
      filename: "index.html",
      // Exposed to the EJS template as <%= git_hash %> / <%= short_git_hash %>.
      templateParameters: {
        git_hash,
        short_git_hash: git_hash.slice(0, 7),
      },
    }),
  ],
  experiments: {
    // Needed to `import` the wasm module produced from the Rust crate.
    asyncWebAssembly: true,
  },
};
| willdurand/xpidump | 8 | A simple tool to dump information about XPI files. | Rust | willdurand | William Durand | mozilla |
.eslintrc.js | JavaScript | module.exports = {
root: true,
env: {
browser: true,
amd: true,
node: true,
es6: true,
},
extends: [
'eslint:recommended',
'plugin:jsx-a11y/recommended',
'plugin:prettier/recommended',
'next',
'next/core-web-vitals',
],
rules: {
'prettier/prettier': 'error',
'react/react-in-jsx-scope': 'off',
'@next/next/no-img-element': 'off',
'jsx-a11y/anchor-is-valid': [
'error',
{
components: ['Link'],
specialLink: ['hrefLeft', 'hrefRight'],
aspects: ['invalidHref', 'preferButton'],
},
],
'react/prop-types': 0,
'no-unused-vars': 0,
'react/no-unescaped-entities': 0,
},
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/Card.js | JavaScript | import Image from './Image'
import Link from './Link'
const Card = ({ title, description, imgSrc, href }) => (
<div className="p-4 md:w-1/2 md" style={{ maxWidth: '544px' }}>
<div className="h-full overflow-hidden border-2 border-gray-200 rounded-md border-opacity-60 dark:border-gray-700">
{href ? (
<Link href={href} aria-label={`Link to ${title}`}>
<Image
alt={title}
src={imgSrc}
className="object-cover object-center lg:h-48 md:h-36"
width={544}
height={306}
/>
</Link>
) : (
<Image
alt={title}
src={imgSrc}
className="object-cover object-center lg:h-48 md:h-36"
width={544}
height={306}
/>
)}
<div className="p-6">
<h2 className="mb-3 text-2xl font-bold leading-8 tracking-tight">
{href ? (
<Link href={href} aria-label={`Link to ${title}`}>
{title}
</Link>
) : (
title
)}
</h2>
<p className="mb-3 prose text-gray-500 max-w-none dark:text-gray-400">{description}</p>
{href && (
<Link
href={href}
className="text-base font-medium leading-6 text-primary-500 hover:text-primary-600 dark:hover:text-primary-400"
aria-label={`Link to ${title}`}
>
Learn more →
</Link>
)}
</div>
</div>
</div>
)
export default Card
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/Footer.js | JavaScript | import Link from './Link'
import siteMetadata from '@/data/siteMetadata'
import SocialIcon from '@/components/social-icons'
// Site footer: social icons followed by an author / copyright line.
export default function Footer() {
  return (
    <footer>
      <div className="flex flex-col items-center mt-16">
        <div className="flex mb-3 space-x-4">
          {/* Icon targets come from the site-wide metadata config. */}
          <SocialIcon kind="github" href={siteMetadata.github} size="6" />
          <SocialIcon kind="facebook" href={siteMetadata.facebook} size="6" />
          <SocialIcon kind="youtube" href={siteMetadata.youtube} size="6" />
          <SocialIcon kind="linkedin" href={siteMetadata.linkedin} size="6" />
          <SocialIcon kind="twitter" href={siteMetadata.twitter} size="6" />
          <SocialIcon kind="rss" href={'/feed.xml'} size="6" />
        </div>
        <div className="flex mb-2 space-x-2 text-sm text-gray-500 dark:text-gray-400">
          <div>
            <Link href="/">{siteMetadata.author}</Link>
          </div>
          <div>{` • `}</div>
          {/* Copyright year is computed at render time. */}
          <div>{`© ${new Date().getFullYear()}`}</div>
        </div>
      </div>
    </footer>
  )
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/Image.js | JavaScript | import NextImage from 'next/image'
// eslint-disable-next-line jsx-a11y/alt-text
const Image = ({ ...rest }) => <NextImage {...rest} />
export default Image
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/LayoutWrapper.js | JavaScript | import siteMetadata from '@/data/siteMetadata'
import headerNavLinks from '@/data/headerNavLinks'
import Logo from '@/data/logo.svg'
import Link from './Link'
import SectionContainer from './SectionContainer'
import Footer from './Footer'
import MobileNav from './MobileNav'
import ThemeSwitch from './ThemeSwitch'
// Top-level page shell: header (logo/title + nav + theme switch), main
// content area, and footer, laid out to fill the viewport height.
const LayoutWrapper = ({ children }) => {
  return (
    <SectionContainer>
      <div className="flex flex-col justify-between h-screen">
        <header className="flex items-center justify-between py-10">
          <div>
            {/* NOTE(review): aria-label "Tailwind CSS Blog" looks like a
                leftover from the starter template — consider using the
                site title instead. */}
            <Link href="/" aria-label="Tailwind CSS Blog">
              <div className="flex items-center justify-between">
                {/* `headerTitle` may be a plain string or a React element. */}
                {typeof siteMetadata.headerTitle === 'string' ? (
                  <div className="h-6 text-5xl mb-8 font-bold font-hand sm:block">
                    {`-> ${siteMetadata.headerTitle}`}
                  </div>
                ) : (
                  siteMetadata.headerTitle
                )}
              </div>
            </Link>
          </div>
          <div className="flex items-center text-base leading-5">
            {/* Desktop nav links; MobileNav below handles small screens. */}
            <div className="hidden sm:block">
              {headerNavLinks.map((link) => (
                <Link
                  key={link.title}
                  href={link.href}
                  className="p-1 font-medium text-gray-900 sm:p-4 dark:text-gray-100"
                >
                  {link.title}
                </Link>
              ))}
            </div>
            <ThemeSwitch />
            <MobileNav />
          </div>
        </header>
        <main className="mb-auto">{children}</main>
        <Footer />
      </div>
    </SectionContainer>
  )
}

export default LayoutWrapper
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/Link.js | JavaScript | /* eslint-disable jsx-a11y/anchor-has-content */
import Link from 'next/link'
const CustomLink = ({ href, ...rest }) => {
const isInternalLink = href && href.startsWith('/')
const isAnchorLink = href && href.startsWith('#')
if (isInternalLink) {
return (
<Link href={href}>
<a {...rest} />
</Link>
)
}
if (isAnchorLink) {
return <a href={href} {...rest} />
}
return <a target="_blank" rel="noopener noreferrer" href={href} {...rest} />
}
export default CustomLink
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/MDXComponents.js | JavaScript | /* eslint-disable react/display-name */
import { useMemo } from 'react'
import { getMDXComponent } from 'mdx-bundler/client'
import Image from './Image'
import CustomLink from './Link'
import TOCInline from './TOCInline'
import Pre from './Pre'
// Component map injected into rendered MDX content: custom image, link and
// code-block components, plus a wrapper that resolves the layout by name.
export const MDXComponents = {
  Image,
  TOCInline,
  a: CustomLink,
  pre: Pre,
  wrapper: ({ components, layout, ...rest }) => {
    // The layout name comes from the post's frontmatter; resolved at runtime.
    const Layout = require(`../layouts/${layout}`).default
    return <Layout {...rest} />
  },
}
// Renders bundled MDX source inside the named layout; the compiled
// component is memoized so it is only rebuilt when the source changes.
export const MDXLayoutRenderer = ({ layout, mdxSource, ...rest }) => {
  const MDXLayout = useMemo(() => getMDXComponent(mdxSource), [mdxSource])
  return <MDXLayout layout={layout} components={MDXComponents} {...rest} />
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/MobileNav.js | JavaScript | import { useState } from 'react'
import Link from './Link'
import headerNavLinks from '@/data/headerNavLinks'
// Hamburger navigation for small screens: a toggle button plus a slide-in
// overlay listing the header links. Body scrolling is locked while open.
const MobileNav = () => {
  const [navShow, setNavShow] = useState(false)
  const onToggleNav = () => {
    setNavShow((status) => {
      if (status) {
        document.body.style.overflow = 'auto'
      } else {
        // Prevent scrolling
        document.body.style.overflow = 'hidden'
      }
      return !status
    })
  }
  return (
    <div className="sm:hidden">
      <button
        type="button"
        className="w-8 h-8 ml-1 mr-1 rounded"
        aria-label="Toggle Menu"
        onClick={onToggleNav}
      >
        {/* Icon switches between an "X" (open) and hamburger (closed). */}
        <svg
          xmlns="http://www.w3.org/2000/svg"
          viewBox="0 0 20 20"
          fill="currentColor"
          className="text-gray-900 dark:text-gray-100"
        >
          {navShow ? (
            <path
              fillRule="evenodd"
              d="M4.293 4.293a1 1 0 011.414 0L10 8.586l4.293-4.293a1 1 0 111.414 1.414L11.414 10l4.293 4.293a1 1 0 01-1.414 1.414L10 11.414l-4.293 4.293a1 1 0 01-1.414-1.414L8.586 10 4.293 5.707a1 1 0 010-1.414z"
              clipRule="evenodd"
            />
          ) : (
            <path
              fillRule="evenodd"
              d="M3 5a1 1 0 011-1h12a1 1 0 110 2H4a1 1 0 01-1-1zM3 10a1 1 0 011-1h12a1 1 0 110 2H4a1 1 0 01-1-1zM3 15a1 1 0 011-1h12a1 1 0 110 2H4a1 1 0 01-1-1z"
              clipRule="evenodd"
            />
          )}
        </svg>
      </button>
      {/* Overlay slides in from the right when `navShow` is true. */}
      <div
        className={`fixed w-full h-full top-24 right-0 bg-gray-200 dark:bg-gray-800 opacity-95 z-10 transform ease-in-out duration-300 ${
          navShow ? 'translate-x-0' : 'translate-x-full'
        }`}
      >
        {/* Invisible full-size button so tapping outside the links closes the menu. */}
        <button
          type="button"
          aria-label="toggle modal"
          className="fixed w-full h-full cursor-auto focus:outline-none"
          onClick={onToggleNav}
        ></button>
        <nav className="fixed h-full mt-8">
          {headerNavLinks.map((link) => (
            <div key={link.title} className="px-12 py-4">
              <Link
                href={link.href}
                className="text-2xl font-bold tracking-widest text-gray-900 dark:text-gray-100"
                onClick={onToggleNav}
              >
                {link.title}
              </Link>
            </div>
          ))}
        </nav>
      </div>
    </div>
  )
}

export default MobileNav
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/PageTitle.js | JavaScript | export default function PageTitle({ children }) {
return (
<h1 className="text-3xl font-extrabold leading-9 tracking-tight text-gray-900 dark:text-gray-100 sm:text-4xl sm:leading-10 md:text-5xl md:leading-14">
{children}
</h1>
)
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/Pagination.js | JavaScript | import Link from '@/components/Link'
export default function Pagination({ totalPages, currentPage }) {
const prevPage = parseInt(currentPage) - 1 > 0
const nextPage = parseInt(currentPage) + 1 <= parseInt(totalPages)
return (
<div className="pt-6 pb-8 space-y-2 md:space-y-5">
<nav className="flex justify-between">
{!prevPage && (
<button rel="previous" className="cursor-auto disabled:opacity-50" disabled={!prevPage}>
Previous
</button>
)}
{prevPage && (
<Link href={currentPage - 1 === 1 ? `/blog/` : `/blog/page/${currentPage - 1}`}>
<button rel="previous">Previous</button>
</Link>
)}
<span>
{currentPage} of {totalPages}
</span>
{!nextPage && (
<button rel="next" className="cursor-auto disabled:opacity-50" disabled={!nextPage}>
Next
</button>
)}
{nextPage && (
<Link href={`/blog/page/${currentPage + 1}`}>
<button rel="next">Next</button>
</Link>
)}
</nav>
</div>
)
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/Pre.js | JavaScript | import { useState, useRef } from 'react'
// Code-block wrapper that shows a copy-to-clipboard button while hovered.
// The checkmark/"copied" state auto-resets after two seconds.
const Pre = (props) => {
  // Ref to the wrapper so we can read the rendered code's innerText.
  const textInput = useRef(null)
  const [hovered, setHovered] = useState(false)
  const [copied, setCopied] = useState(false)
  const onEnter = () => {
    setHovered(true)
  }
  const onExit = () => {
    setHovered(false)
    setCopied(false)
  }
  const onCopy = () => {
    setCopied(true)
    navigator.clipboard.writeText(textInput.current.innerText)
    setTimeout(() => {
      setCopied(false)
    }, 2000)
  }
  return (
    <div ref={textInput} onMouseEnter={onEnter} onMouseLeave={onExit} className="relative">
      {hovered && (
        <button
          aria-label="Copy code"
          type="button"
          className={`absolute right-2 top-2 w-8 h-8 p-1 rounded border-2 bg-gray-700 dark:bg-gray-800 ${
            copied
              ? 'focus:outline-none focus:border-green-400 border-green-400'
              : 'border-gray-300'
          }`}
          onClick={onCopy}
        >
          {/* Clipboard icon; gains a checkmark + green tint once copied. */}
          <svg
            xmlns="http://www.w3.org/2000/svg"
            viewBox="0 0 24 24"
            stroke="currentColor"
            fill="none"
            className={copied ? 'text-green-400' : 'text-gray-300'}
          >
            {copied ? (
              <>
                <path
                  strokeLinecap="round"
                  strokeLinejoin="round"
                  strokeWidth={2}
                  d="M9 5H7a2 2 0 00-2 2v12a2 2 0 002 2h10a2 2 0 002-2V7a2 2 0 00-2-2h-2M9 5a2 2 0 002 2h2a2 2 0 002-2M9 5a2 2 0 012-2h2a2 2 0 012 2m-6 9l2 2 4-4"
                />
              </>
            ) : (
              <>
                <path
                  strokeLinecap="round"
                  strokeLinejoin="round"
                  strokeWidth={2}
                  d="M9 5H7a2 2 0 00-2 2v12a2 2 0 002 2h10a2 2 0 002-2V7a2 2 0 00-2-2h-2M9 5a2 2 0 002 2h2a2 2 0 002-2M9 5a2 2 0 012-2h2a2 2 0 012 2"
                />
              </>
            )}
          </svg>
        </button>
      )}
      <pre>{props.children}</pre>
    </div>
  )
}

export default Pre
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/SEO.js | JavaScript | import Head from 'next/head'
import { useRouter } from 'next/router'
import siteMetadata from '@/data/siteMetadata'
// Standard <head> metadata for non-article pages: title, description,
// Open Graph and Twitter card tags built from the site-wide config.
export const PageSeo = ({ title, description }) => {
  const router = useRouter()
  return (
    <Head>
      <title>{`${title}`}</title>
      <meta name="robots" content="follow, index" />
      <meta name="description" content={description} />
      {/* Canonical URL of the current page. */}
      <meta property="og:url" content={`${siteMetadata.siteUrl}${router.asPath}`} />
      <meta property="og:type" content="website" />
      <meta property="og:site_name" content={siteMetadata.title} />
      <meta property="og:description" content={description} />
      <meta property="og:title" content={title} />
      <meta property="og:image" content={`${siteMetadata.siteUrl}${siteMetadata.socialBanner}`} />
      <meta name="twitter:card" content="summary_large_image" />
      <meta name="twitter:site" content={siteMetadata.twitter} />
      <meta name="twitter:title" content={title} />
      <meta name="twitter:description" content={description} />
      <meta name="twitter:image" content={`${siteMetadata.siteUrl}${siteMetadata.socialBanner}`} />
    </Head>
  )
}
// Article <head> metadata: Open Graph / Twitter tags plus a schema.org
// Article JSON-LD block built from the post's frontmatter.
export const BlogSeo = ({ authorDetails, title, summary, date, lastmod, url, images = [] }) => {
  const router = useRouter()
  const publishedAt = new Date(date).toISOString()
  // `lastmod` is optional; fall back to the publication date.
  const modifiedAt = new Date(lastmod || date).toISOString()
  // Normalize `images`: none -> site banner, string -> single-item array.
  let imagesArr =
    images.length === 0
      ? [siteMetadata.socialBanner]
      : typeof images === 'string'
      ? [images]
      : images

  const featuredImages = imagesArr.map((img) => {
    return {
      '@type': 'ImageObject',
      url: `${siteMetadata.siteUrl}${img}`,
    }
  })

  // Authors from frontmatter when provided, else the site author.
  let authorList
  if (authorDetails) {
    authorList = authorDetails.map((author) => {
      return {
        '@type': 'Person',
        name: author.name,
      }
    })
  } else {
    authorList = {
      '@type': 'Person',
      name: siteMetadata.author,
    }
  }

  // schema.org Article structured data for search engines.
  const structuredData = {
    '@context': 'https://schema.org',
    '@type': 'Article',
    mainEntityOfPage: {
      '@type': 'WebPage',
      '@id': url,
    },
    headline: title,
    image: featuredImages,
    datePublished: publishedAt,
    dateModified: modifiedAt,
    author: authorList,
    publisher: {
      '@type': 'Organization',
      name: siteMetadata.author,
      logo: {
        '@type': 'ImageObject',
        url: `${siteMetadata.siteUrl}${siteMetadata.siteLogo}`,
      },
    },
    description: summary,
  }

  return (
    <>
      <Head>
        <title>{`${title}`}</title>
        <meta name="robots" content="follow, index" />
        <meta name="description" content={summary} />
        <meta property="og:url" content={`${siteMetadata.siteUrl}${router.asPath}`} />
        <meta property="og:type" content="article" />
        <meta property="og:site_name" content={siteMetadata.title} />
        <meta property="og:description" content={summary} />
        <meta property="og:title" content={title} />
        {featuredImages.map((img) => (
          <meta property="og:image" content={img.url} key={img.url} />
        ))}
        <meta name="twitter:card" content="summary_large_image" />
        <meta name="twitter:site" content={siteMetadata.twitter} />
        <meta name="twitter:title" content={title} />
        <meta name="twitter:description" content={summary} />
        <meta name="twitter:image" content={featuredImages[0].url} />
        {date && <meta property="article:published_time" content={publishedAt} />}
        {lastmod && <meta property="article:modified_time" content={modifiedAt} />}
        <link rel="canonical" href={`${siteMetadata.siteUrl}${router.asPath}`} />
        <script
          type="application/ld+json"
          dangerouslySetInnerHTML={{ __html: JSON.stringify(structuredData, null, 2) }}
        />
      </Head>
    </>
  )
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/SectionContainer.js | JavaScript | export default function SectionContainer({ children }) {
return <div className="max-w-3xl px-4 mx-auto sm:px-6 xl:max-w-5xl xl:px-0">{children}</div>
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/TOCInline.js | JavaScript | /**
* @typedef TocHeading
* @prop {string} value
* @prop {number} depth
* @prop {string} url
*/
/**
* Generates an inline table of contents
* Exclude titles matching this string (new RegExp('^(' + string + ')$', 'i')).
* If an array is passed the array gets joined with a pipe (new RegExp('^(' + array.join('|') + ')$', 'i')).
*
* @param {{
* toc: TocHeading[],
* indentDepth?: number,
* fromHeading?: number,
* toHeading?: number,
* asDisclosure?: boolean,
* exclude?: string|string[]
* }} props
*
*/
const TOCInline = ({
  toc,
  indentDepth = 3,
  fromHeading = 1,
  toHeading = 6,
  asDisclosure = false,
  exclude = '',
}) => {
  // Build one case-insensitive full-match regex from the exclusion list.
  const re = Array.isArray(exclude)
    ? new RegExp('^(' + exclude.join('|') + ')$', 'i')
    : new RegExp('^(' + exclude + ')$', 'i')

  // Keep only headings inside [fromHeading, toHeading] and not excluded.
  const filteredToc = toc.filter(
    (heading) =>
      heading.depth >= fromHeading && heading.depth <= toHeading && !re.test(heading.value)
  )

  const tocList = (
    <ul>
      {filteredToc.map((heading) => (
        // NOTE(review): duplicate heading texts would produce duplicate
        // React keys here; `heading.url` may be a safer key — verify.
        <li key={heading.value} className={`${heading.depth >= indentDepth && 'ml-6'}`}>
          <a href={heading.url}>{heading.value}</a>
        </li>
      ))}
    </ul>
  )

  return (
    <>
      {asDisclosure ? (
        <details open>
          <summary className="pt-2 pb-2 ml-6 text-xl font-bold">Table of Contents</summary>
          <div className="ml-6">{tocList}</div>
        </details>
      ) : (
        tocList
      )}
    </>
  )
}

export default TOCInline
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/Tag.js | JavaScript | import Link from 'next/link'
import kebabCase from '@/lib/utils/kebabCase'
const Tag = ({ text }) => {
return (
<Link href={`/tags/${kebabCase(text)}`}>
<a className="mr-3 text-sm font-medium uppercase text-primary-500 hover:text-primary-600 dark:hover:text-primary-400">
{text.split(' ').join('-')}
</a>
</Link>
)
}
export default Tag
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/ThemeSwitch.js | JavaScript | import { useEffect, useState } from 'react'
import { useTheme } from 'next-themes'
const ThemeSwitch = () => {
const [mounted, setMounted] = useState(false)
const { theme, setTheme, resolvedTheme } = useTheme()
// When mounted on client, now we can show the UI
useEffect(() => setMounted(true), [])
return (
<button
aria-label="Toggle Dark Mode"
type="button"
className="w-8 h-8 p-1 ml-1 mr-1 rounded sm:ml-4"
onClick={() => setTheme(theme === 'dark' || resolvedTheme === 'dark' ? 'light' : 'dark')}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
className="text-gray-900 dark:text-gray-100"
>
{mounted && (theme === 'dark' || resolvedTheme === 'dark') ? (
<path
fillRule="evenodd"
d="M10 2a1 1 0 011 1v1a1 1 0 11-2 0V3a1 1 0 011-1zm4 8a4 4 0 11-8 0 4 4 0 018 0zm-.464 4.95l.707.707a1 1 0 001.414-1.414l-.707-.707a1 1 0 00-1.414 1.414zm2.12-10.607a1 1 0 010 1.414l-.706.707a1 1 0 11-1.414-1.414l.707-.707a1 1 0 011.414 0zM17 11a1 1 0 100-2h-1a1 1 0 100 2h1zm-7 4a1 1 0 011 1v1a1 1 0 11-2 0v-1a1 1 0 011-1zM5.05 6.464A1 1 0 106.465 5.05l-.708-.707a1 1 0 00-1.414 1.414l.707.707zm1.414 8.486l-.707.707a1 1 0 01-1.414-1.414l.707-.707a1 1 0 011.414 1.414zM4 11a1 1 0 100-2H3a1 1 0 000 2h1z"
clipRule="evenodd"
/>
) : (
<path d="M17.293 13.293A8 8 0 016.707 2.707a8.001 8.001 0 1010.586 10.586z" />
)}
</svg>
</button>
)
}
export default ThemeSwitch
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/analytics/GoogleAnalytics.js | JavaScript | import Script from 'next/script'
import siteMetadata from '@/data/siteMetadata'
const GAScript = () => {
return (
<>
<Script
strategy="lazyOnload"
src={`https://www.googletagmanager.com/gtag/js?id=${siteMetadata.analytics.googleAnalyticsId}`}
/>
<Script strategy="lazyOnload">
{`
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', '${siteMetadata.analytics.googleAnalyticsId}', {
page_path: window.location.pathname,
});
`}
</Script>
</>
)
}
export default GAScript
// https://developers.google.com/analytics/devguides/collection/gtagjs/events
export const logEvent = (action, category, label, value) => {
window.gtag?.('event', action, {
event_category: category,
event_label: label,
value: value,
})
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/analytics/Plausible.js | JavaScript | import Script from 'next/script'
import siteMetadata from '@/data/siteMetadata'
const PlausibleScript = () => {
return (
<>
<Script
strategy="lazyOnload"
data-domain={siteMetadata.analytics.plausibleDataDomain}
src="https://plausible.io/js/plausible.js"
/>
<Script strategy="lazyOnload">
{`
window.plausible = window.plausible || function() { (window.plausible.q = window.plausible.q || []).push(arguments) }
`}
</Script>
</>
)
}
export default PlausibleScript
// https://plausible.io/docs/custom-event-goals
export const logEvent = (eventName, ...rest) => {
return window.plausible?.(eventName, ...rest)
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/analytics/SimpleAnalytics.js | JavaScript | import Script from 'next/script'
const SimpleAnalyticsScript = () => {
return (
<>
<Script strategy="lazyOnload">
{`
window.sa_event=window.sa_event||function(){var a=[].slice.call(arguments);window.sa_event.q?window.sa_event.q.push(a):window.sa_event.q=[a]};
`}
</Script>
<Script strategy="lazyOnload" src="https://scripts.simpleanalyticscdn.com/latest.js" />
</>
)
}
// https://docs.simpleanalytics.com/events
export const logEvent = (eventName, callback) => {
if (callback) {
return window.sa_event?.(eventName, callback)
} else {
return window.sa_event?.(eventName)
}
}
export default SimpleAnalyticsScript
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/analytics/index.js | JavaScript | import GA from './GoogleAnalytics'
import Plausible from './Plausible'
import SimpleAnalytics from './SimpleAnalytics'
import siteMetadata from '@/data/siteMetadata'
const isProduction = process.env.NODE_ENV === 'production'
const Analytics = () => {
return (
<>
{isProduction && siteMetadata.analytics.plausibleDataDomain && <Plausible />}
{isProduction && siteMetadata.analytics.simpleAnalytics && <SimpleAnalytics />}
{isProduction && siteMetadata.analytics.googleAnalyticsId && <GA />}
</>
)
}
export default Analytics
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/comments/Disqus.js | JavaScript | import React, { useState } from 'react'
import siteMetadata from '@/data/siteMetadata'
const Disqus = ({ frontMatter }) => {
const [enableLoadComments, setEnabledLoadComments] = useState(true)
const COMMENTS_ID = 'disqus_thread'
function LoadComments() {
setEnabledLoadComments(false)
window.disqus_config = function () {
this.page.url = window.location.href
this.page.identifier = frontMatter.slug
}
if (window.DISQUS === undefined) {
const script = document.createElement('script')
script.src = 'https://' + siteMetadata.comment.disqus.shortname + '.disqus.com/embed.js'
script.setAttribute('data-timestamp', +new Date())
script.setAttribute('crossorigin', 'anonymous')
script.async = true
document.body.appendChild(script)
} else {
window.DISQUS.reset({ reload: true })
}
}
return (
<div className="pt-6 pb-6 text-center text-gray-700 dark:text-gray-300">
{enableLoadComments && <button onClick={LoadComments}>Load Comments</button>}
<div className="disqus-frame" id={COMMENTS_ID} />
</div>
)
}
export default Disqus
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/comments/Giscus.js | JavaScript | import React, { useState } from 'react'
import { useTheme } from 'next-themes'
import siteMetadata from '@/data/siteMetadata'
const Giscus = ({ mapping }) => {
const [enableLoadComments, setEnabledLoadComments] = useState(true)
const { theme, resolvedTheme } = useTheme()
const commentsTheme =
siteMetadata.comment.giscusConfig.themeURL === ''
? theme === 'dark' || resolvedTheme === 'dark'
? siteMetadata.comment.giscusConfig.darkTheme
: siteMetadata.comment.giscusConfig.theme
: siteMetadata.comment.giscusConfig.themeURL
const COMMENTS_ID = 'comments-container'
function LoadComments() {
setEnabledLoadComments(false)
const script = document.createElement('script')
script.src = 'https://giscus.app/client.js'
script.setAttribute('data-repo', siteMetadata.comment.giscusConfig.repo)
script.setAttribute('data-repo-id', siteMetadata.comment.giscusConfig.repositoryId)
script.setAttribute('data-category', siteMetadata.comment.giscusConfig.category)
script.setAttribute('data-category-id', siteMetadata.comment.giscusConfig.categoryId)
script.setAttribute('data-mapping', mapping)
script.setAttribute('data-reactions-enabled', siteMetadata.comment.giscusConfig.reactions)
script.setAttribute('data-emit-metadata', siteMetadata.comment.giscusConfig.metadata)
script.setAttribute('data-theme', commentsTheme)
script.setAttribute('crossorigin', 'anonymous')
script.async = true
const comments = document.getElementById(COMMENTS_ID)
if (comments) comments.appendChild(script)
return () => {
const comments = document.getElementById(COMMENTS_ID)
if (comments) comments.innerHTML = ''
}
}
return (
<div className="pt-6 pb-6 text-center text-gray-700 dark:text-gray-300">
{enableLoadComments && <button onClick={LoadComments}>Load Comments</button>}
<div className="giscus" id={COMMENTS_ID} />
</div>
)
}
export default Giscus
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/comments/Utterances.js | JavaScript | import React, { useState } from 'react'
import { useTheme } from 'next-themes'
import siteMetadata from '@/data/siteMetadata'
const Utterances = ({ issueTerm }) => {
const [enableLoadComments, setEnabledLoadComments] = useState(true)
const { theme, resolvedTheme } = useTheme()
const commentsTheme =
theme === 'dark' || resolvedTheme === 'dark'
? siteMetadata.comment.utterancesConfig.darkTheme
: siteMetadata.comment.utterancesConfig.theme
const COMMENTS_ID = 'comments-container'
function LoadComments() {
setEnabledLoadComments(false)
const script = document.createElement('script')
script.src = 'https://utteranc.es/client.js'
script.setAttribute('repo', siteMetadata.comment.utterancesConfig.repo)
script.setAttribute('issue-term', issueTerm)
script.setAttribute('label', siteMetadata.comment.utterancesConfig.label)
script.setAttribute('theme', commentsTheme)
script.setAttribute('crossorigin', 'anonymous')
script.async = true
const comments = document.getElementById(COMMENTS_ID)
if (comments) comments.appendChild(script)
return () => {
const comments = document.getElementById(COMMENTS_ID)
if (comments) comments.innerHTML = ''
}
}
// Added `relative` to fix a weird bug with `utterances-frame` position
return (
<div className="pt-6 pb-6 text-center text-gray-700 dark:text-gray-300">
{enableLoadComments && <button onClick={LoadComments}>Load Comments</button>}
<div className="utterances-frame relative" id={COMMENTS_ID} />
</div>
)
}
export default Utterances
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/comments/index.js | JavaScript | import siteMetadata from '@/data/siteMetadata'
import dynamic from 'next/dynamic'
const UtterancesComponent = dynamic(
() => {
return import('@/components/comments/Utterances')
},
{ ssr: false }
)
const GiscusComponent = dynamic(
() => {
return import('@/components/comments/Giscus')
},
{ ssr: false }
)
const DisqusComponent = dynamic(
() => {
return import('@/components/comments/Disqus')
},
{ ssr: false }
)
const Comments = ({ frontMatter }) => {
let term
switch (
siteMetadata.comment.giscusConfig.mapping ||
siteMetadata.comment.utterancesConfig.issueTerm
) {
case 'pathname':
term = frontMatter.slug
break
case 'url':
term = window.location.href
break
case 'title':
term = frontMatter.title
break
}
return (
<>
{siteMetadata.comment && siteMetadata.comment.provider === 'giscus' && (
<GiscusComponent mapping={term} />
)}
{siteMetadata.comment && siteMetadata.comment.provider === 'utterances' && (
<UtterancesComponent issueTerm={term} />
)}
{siteMetadata.comment && siteMetadata.comment.provider === 'disqus' && (
<DisqusComponent frontMatter={frontMatter} />
)}
</>
)
}
export default Comments
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
components/social-icons/index.js | JavaScript | import Mail from './mail.svg'
import Github from './github.svg'
import Facebook from './facebook.svg'
import Youtube from './youtube.svg'
import Linkedin from './linkedin.svg'
import Instagram from './instagram.svg'
import OpenStreetMap from './openstreetmap.svg'
import Rss from './rss.svg'
import Twitter from './twitter.svg'
// Icons taken from: https://simpleicons.org/
const components = {
mail: Mail,
github: Github,
facebook: Facebook,
youtube: Youtube,
linkedin: Linkedin,
instagram: Instagram,
openstreetmap: OpenStreetMap,
twitter: Twitter,
rss: Rss,
}
const SocialIcon = ({ kind, href, size = 8 }) => {
if (!href) return null
const SocialSvg = components[kind]
return (
<a
className="text-sm text-gray-500 transition hover:text-gray-600"
target="_blank"
rel="noopener noreferrer"
href={href}
>
<span className="sr-only">{kind}</span>
<SocialSvg
className={`fill-current text-gray-700 dark:text-gray-200 hover:text-blue-500 dark:hover:text-blue-400 h-${size} w-${size}`}
/>
</a>
)
}
export default SocialIcon
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
css/tailwind.css | CSS | @tailwind base;
@tailwind components;
@tailwind utilities;
.remark-code-title {
@apply px-5 py-3 font-mono text-sm font-bold text-gray-200 bg-gray-700 rounded-t;
}
.remark-code-title + div > pre {
@apply mt-0 rounded-t-none;
}
.task-list-item:before {
@apply hidden;
}
.code-line {
@apply pl-4 -mx-4 border-l-4 border-gray-800;
}
.highlight-line {
@apply -mx-4 bg-gray-700 bg-opacity-50 border-l-4 border-primary-500;
}
.line-number::before {
@apply pr-4 -ml-2 text-gray-400;
content: attr(line);
}
html {
scroll-behavior: smooth;
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
data/headerNavLinks.js | JavaScript | const headerNavLinks = [
{ href: '/blog', title: 'Blog' },
// { href: '/tags', title: 'Tags' },
{ href: '/projects', title: 'Projects' },
{ href: '/about', title: 'About' },
]
export default headerNavLinks
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
data/projectsData.js | JavaScript | const projectsData = [
{
title: 'OSMCha',
description: `An OpenStreetMap validation tool. OSMCha registers all changesets
created on OSM, provides a good visualization and run some analysis in order to flag
possibly bad edits. Mapbox, Facebook, Apple and the OSM community uses OSMCha
to keep track of the changes happening on the OSM data.`,
imgSrc: '/static/images/osmcha.jpg',
href: 'https://www.osmcha.org',
},
{
title: 'Tasking Manager 4',
description: `The most popular tool for teams to coordinate mapping on
OpenStreetMap. Tasking Manager allows to define an area of interest and divide it in
in smaller parts, so a group of contributors can map at the same time. It's widely
used by humanitarian organizations and companies that work with OpenStreetMap data.`,
imgSrc: '/static/images/tm.jpg',
href: 'https://github.com/hotosm/tasking-manager/',
},
]
export default projectsData
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
data/siteMetadata.js | JavaScript | const siteMetadata = {
title: 'Wille',
author: 'Wille Marcel',
headerTitle: 'Wille',
description: 'Software engineering, maps & other thoughts',
language: 'en-gb',
siteUrl: 'https://wille.me',
siteRepo: 'https://github.com/willemarcel/wille.blog',
image: '/static/images/avatar.jpg',
head: '/static/images/head.jpg',
socialBanner: '/static/images/social.jpg',
email: 'address@yoursite.com',
github: 'https://github.com/willemarcel',
twitter: 'https://twitter.com/_wille',
linkedin: 'https://www.linkedin.com/in/willemarcel/',
locale: 'en-GB',
analytics: {
// supports plausible, simpleAnalytics or googleAnalytics
plausibleDataDomain: '', // e.g. tailwind-nextjs-starter-blog.vercel.app
simpleAnalytics: false, // true or false
googleAnalyticsId: '', // e.g. UA-000000-2 or G-XXXXXXX
},
comment: {
// Select a provider and use the environment variables associated to it
// https://vercel.com/docs/environment-variables
provider: null, // supported providers: giscus, utterances, disqus
giscusConfig: {
// Visit the link below, and follow the steps in the 'configuration' section
// https://giscus.app/
repo: process.env.NEXT_PUBLIC_GISCUS_REPO,
repositoryId: process.env.NEXT_PUBLIC_GISCUS_REPOSITORY_ID,
category: process.env.NEXT_PUBLIC_GISCUS_CATEGORY,
categoryId: process.env.NEXT_PUBLIC_GISCUS_CATEGORY_ID,
mapping: 'pathname', // supported options: pathname, url, title
reactions: '1', // Emoji reactions: 1 = enable / 0 = disable
// Send discussion metadata periodically to the parent window: 1 = enable / 0 = disable
metadata: '0',
// theme example: light, dark, dark_dimmed, dark_high_contrast
// transparent_dark, preferred_color_scheme, custom
theme: 'light',
// theme when dark mode
darkTheme: 'transparent_dark',
// If the theme option above is set to 'custom`
// please provide a link below to your custom theme css file.
// example: https://giscus.app/themes/custom_example.css
themeURL: '',
},
utterancesConfig: {
// Visit the link below, and follow the steps in the 'configuration' section
// https://utteranc.es/
repo: process.env.NEXT_PUBLIC_UTTERANCES_REPO,
issueTerm: '', // supported options: pathname, url, title
label: '', // label (optional): Comment 💬
// theme example: github-light, github-dark, preferred-color-scheme
// github-dark-orange, icy-dark, dark-blue, photon-dark, boxy-light
theme: '',
// theme when dark mode
darkTheme: '',
},
disqus: {
// https://help.disqus.com/en/articles/1717111-what-s-a-shortname
shortname: process.env.NEXT_PUBLIC_DISQUS_SHORTNAME,
},
},
}
module.exports = siteMetadata
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
layouts/AuthorLayout.js | JavaScript | import SocialIcon from '@/components/social-icons'
import Image from '@/components/Image'
import { PageSeo } from '@/components/SEO'
export default function AuthorLayout({ children, frontMatter }) {
const {
name,
avatar,
occupation,
company,
instagram,
twitter,
linkedin,
openstreetmap,
github,
} = frontMatter
return (
<>
<PageSeo title={`About - ${name}`} description={`About me - ${name}`} />
<div className="divide-y">
<div className="pt-6 pb-8 space-y-2 md:space-y-5">
<h1 className="text-3xl font-extrabold leading-9 tracking-tight text-gray-900 dark:text-gray-100 sm:text-4xl sm:leading-10 md:text-6xl md:leading-14">
About
</h1>
</div>
<div className="items-start space-y-2 xl:grid xl:grid-cols-3 xl:gap-x-8 xl:space-y-0">
<div className="flex flex-col items-center pt-8 space-x-2">
<Image
src={avatar}
alt="avatar"
width="192px"
height="192px"
className="w-48 h-48 rounded-full"
/>
<h3 className="pt-4 pb-2 text-2xl font-bold leading-8 tracking-tight">{name}</h3>
<div className="text-gray-500 dark:text-gray-400">{occupation}</div>
<div className="text-gray-500 dark:text-gray-400">{company}</div>
<div className="flex pt-6 space-x-3">
<SocialIcon kind="github" href={github} />
<SocialIcon kind="instagram" href={instagram} />
<SocialIcon kind="linkedin" href={linkedin} />
<SocialIcon kind="openstreetmap" href={openstreetmap} />
<SocialIcon kind="twitter" href={twitter} />
</div>
</div>
<div className="pt-8 pb-8 prose dark:prose-dark max-w-none xl:col-span-2">{children}</div>
</div>
</div>
</>
)
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
layouts/ListLayout.js | JavaScript | import Link from '@/components/Link'
import Tag from '@/components/Tag'
import siteMetadata from '@/data/siteMetadata'
import { useState } from 'react'
import Pagination from '@/components/Pagination'
import formatDate from '@/lib/utils/formatDate'
export default function ListLayout({ posts, title, initialDisplayPosts = [], pagination }) {
const [searchValue, setSearchValue] = useState('')
const filteredBlogPosts = posts.filter((frontMatter) => {
const searchContent = frontMatter.title + frontMatter.summary + frontMatter.tags.join(' ')
return searchContent.toLowerCase().includes(searchValue.toLowerCase())
})
// If initialDisplayPosts exist, display it if no searchValue is specified
const displayPosts =
initialDisplayPosts.length > 0 && !searchValue ? initialDisplayPosts : filteredBlogPosts
return (
<>
<div className="divide-y">
<div className="pt-6 pb-8 space-y-2 md:space-y-5">
<h1 className="text-3xl font-extrabold leading-9 tracking-tight text-gray-900 dark:text-gray-100 sm:text-4xl sm:leading-10 md:text-6xl md:leading-14">
{title}
</h1>
<div className="relative max-w-lg">
<input
aria-label="Search articles"
type="text"
onChange={(e) => setSearchValue(e.target.value)}
placeholder="Search articles"
className="block w-full px-4 py-2 text-gray-900 bg-white border border-gray-300 rounded-md dark:border-gray-900 focus:ring-primary-500 focus:border-primary-500 dark:bg-gray-800 dark:text-gray-100"
/>
<svg
className="absolute w-5 h-5 text-gray-400 right-3 top-3 dark:text-gray-300"
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth={2}
d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z"
/>
</svg>
</div>
</div>
<ul>
{!filteredBlogPosts.length && 'No posts found.'}
{displayPosts.map((frontMatter) => {
const { slug, date, title, summary, tags } = frontMatter
return (
<li key={slug} className="py-4">
<article className="space-y-2 xl:grid xl:grid-cols-4 xl:space-y-0 xl:items-baseline">
<dl>
<dt className="sr-only">Published on</dt>
<dd className="text-base font-medium leading-6 text-gray-500 dark:text-gray-400">
<time dateTime={date}>{formatDate(date)}</time>
</dd>
</dl>
<div className="space-y-3 xl:col-span-3">
<div>
<h3 className="text-2xl font-bold leading-8 tracking-tight">
<Link href={`/blog/${slug}`} className="text-gray-900 dark:text-gray-100">
{title}
</Link>
</h3>
<div className="flex flex-wrap">
{tags.map((tag) => (
<Tag key={tag} text={tag} />
))}
</div>
</div>
<div className="prose text-gray-500 max-w-none dark:text-gray-400">
{summary}
</div>
</div>
</article>
</li>
)
})}
</ul>
</div>
{pagination && pagination.totalPages > 1 && !searchValue && (
<Pagination currentPage={pagination.currentPage} totalPages={pagination.totalPages} />
)}
</>
)
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
layouts/PostLayout.js | JavaScript | import Link from '@/components/Link'
import PageTitle from '@/components/PageTitle'
import SectionContainer from '@/components/SectionContainer'
import { BlogSeo } from '@/components/SEO'
import Image from '@/components/Image'
import Tag from '@/components/Tag'
import siteMetadata from '@/data/siteMetadata'
import Comments from '@/components/comments'
const editUrl = (fileName) => `${siteMetadata.siteRepo}/blob/master/data/blog/${fileName}`
const discussUrl = (slug) =>
`https://mobile.twitter.com/search?q=${encodeURIComponent(
`${siteMetadata.siteUrl}/blog/${slug}`
)}`
const postDateTemplate = { weekday: 'long', year: 'numeric', month: 'long', day: 'numeric' }
export default function PostLayout({ frontMatter, authorDetails, next, prev, children }) {
const { slug, fileName, date, title, tags } = frontMatter
return (
<SectionContainer>
<BlogSeo
url={`${siteMetadata.siteUrl}/blog/${slug}`}
authorDetails={authorDetails}
{...frontMatter}
/>
<article>
<div className="xl:divide-y xl:divide-gray-200 xl:dark:divide-gray-700">
<header className="pt-6 xl:pb-6">
<div className="space-y-1 text-center">
<dl className="space-y-10">
<div>
<dt className="sr-only">Published on</dt>
<dd className="text-base font-medium leading-6 text-gray-500 dark:text-gray-400">
<time dateTime={date}>
{new Date(date).toLocaleDateString(siteMetadata.locale, postDateTemplate)}
</time>
</dd>
</div>
</dl>
<div>
<PageTitle>{title}</PageTitle>
</div>
</div>
</header>
<div
className="pb-8 divide-y divide-gray-200 xl:divide-y-0 dark:divide-gray-700 xl:grid xl:grid-cols-4 xl:gap-x-6"
style={{ gridTemplateRows: 'auto 1fr' }}
>
<dl className="pt-6 pb-10 xl:pt-11 xl:border-b xl:border-gray-200 xl:dark:border-gray-700">
<dt className="sr-only">Authors</dt>
<dd>
<ul className="flex justify-center space-x-8 xl:block sm:space-x-12 xl:space-x-0 xl:space-y-8">
{authorDetails.map((author) => (
<li className="flex items-center space-x-2" key={author.name}>
{author.avatar && (
<Image
src={author.avatar}
width="38px"
height="38px"
alt="avatar"
className="w-10 h-10 rounded-full"
/>
)}
<dl className="text-sm font-medium leading-5 whitespace-nowrap">
<dt className="sr-only">Name</dt>
<dd className="text-gray-900 dark:text-gray-100">{author.name}</dd>
<dt className="sr-only">Twitter</dt>
<dd>
{author.twitter && (
<Link
href={author.twitter}
className="text-primary-500 hover:text-primary-600 dark:hover:text-primary-400"
>
{author.twitter.replace('https://twitter.com/', '@')}
</Link>
)}
</dd>
</dl>
</li>
))}
</ul>
</dd>
</dl>
<div className="divide-y divide-gray-200 dark:divide-gray-700 xl:pb-0 xl:col-span-3 xl:row-span-2">
<div className="pt-10 pb-8 prose dark:prose-dark max-w-none">{children}</div>
<div className="pt-6 pb-6 text-sm italic text-gray-700 dark:text-gray-300">
<Link href={discussUrl(slug)} rel="nofollow">
{'Comment on Twitter'}
</Link>
</div>
<Comments frontMatter={frontMatter} />
</div>
<footer>
<div className="text-sm font-medium leading-5 divide-gray-200 xl:divide-y dark:divide-gray-700 xl:col-start-1 xl:row-start-2">
{tags && (
<div className="py-4 xl:py-8">
<h2 className="text-xs tracking-wide text-gray-500 uppercase dark:text-gray-400">
Tags
</h2>
<div className="flex flex-wrap">
{tags.map((tag) => (
<Tag key={tag} text={tag} />
))}
</div>
</div>
)}
{(next || prev) && (
<div className="flex justify-between py-4 xl:block xl:space-y-8 xl:py-8">
{prev && (
<div>
<h2 className="text-xs tracking-wide text-gray-500 uppercase dark:text-gray-400">
Previous Article
</h2>
<div className="text-primary-500 hover:text-primary-600 dark:hover:text-primary-400">
<Link href={`/blog/${prev.slug}`}>{prev.title}</Link>
</div>
</div>
)}
{next && (
<div>
<h2 className="text-xs tracking-wide text-gray-500 uppercase dark:text-gray-400">
Next Article
</h2>
<div className="text-primary-500 hover:text-primary-600 dark:hover:text-primary-400">
<Link href={`/blog/${next.slug}`}>{next.title}</Link>
</div>
</div>
)}
</div>
)}
</div>
<div className="pt-4 xl:pt-8">
<Link
href="/blog"
className="text-primary-500 hover:text-primary-600 dark:hover:text-primary-400"
>
← Back to the blog
</Link>
</div>
</footer>
</div>
</div>
</article>
</SectionContainer>
)
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
layouts/PostSimple.js | JavaScript | import Link from '@/components/Link'
import PageTitle from '@/components/PageTitle'
import SectionContainer from '@/components/SectionContainer'
import { BlogSeo } from '@/components/SEO'
import siteMetadata from '@/data/siteMetadata'
import formatDate from '@/lib/utils/formatDate'
import Comments from '@/components/comments'
export default function PostLayout({ frontMatter, authorDetails, next, prev, children }) {
const { date, title } = frontMatter
return (
<SectionContainer>
<BlogSeo url={`${siteMetadata.siteUrl}/blog/${frontMatter.slug}`} {...frontMatter} />
<article>
<div>
<header>
<div className="pb-10 space-y-1 text-center border-b border-gray-200 dark:border-gray-700">
<dl>
<div>
<dt className="sr-only">Published on</dt>
<dd className="text-base font-medium leading-6 text-gray-500 dark:text-gray-400">
<time dateTime={date}>{formatDate(date)}</time>
</dd>
</div>
</dl>
<div>
<PageTitle>{title}</PageTitle>
</div>
</div>
</header>
<div
className="pb-8 divide-y divide-gray-200 xl:divide-y-0 dark:divide-gray-700 "
style={{ gridTemplateRows: 'auto 1fr' }}
>
<div className="divide-y divide-gray-200 dark:divide-gray-700 xl:pb-0 xl:col-span-3 xl:row-span-2">
<div className="pt-10 pb-8 prose dark:prose-dark max-w-none">{children}</div>
</div>
<Comments frontMatter={frontMatter} />
<footer>
<div className="flex flex-col text-sm font-medium sm:flex-row sm:justify-between sm:text-base">
{prev && (
<div className="pt-4 xl:pt-8">
<Link
href={`/blog/${prev.slug}`}
className="text-primary-500 hover:text-primary-600 dark:hover:text-primary-400"
>
← {prev.title}
</Link>
</div>
)}
{next && (
<div className="pt-4 xl:pt-8">
<Link
href={`/blog/${next.slug}`}
className="text-primary-500 hover:text-primary-600 dark:hover:text-primary-400"
>
{next.title} →
</Link>
</div>
)}
</div>
</footer>
</div>
</div>
</article>
</SectionContainer>
)
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
lib/generate-rss.js | JavaScript | import { escape } from '@/lib/utils/htmlEscaper'
import siteMetadata from '@/data/siteMetadata'
const generateRssItem = (post) => `
<item>
<guid>${siteMetadata.siteUrl}/blog/${post.slug}</guid>
<title>${escape(post.title)}</title>
<link>${siteMetadata.siteUrl}/blog/${post.slug}</link>
${post.summary && `<description>${escape(post.summary)}</description>`}
<pubDate>${new Date(post.date).toUTCString()}</pubDate>
<author>${siteMetadata.email} (${siteMetadata.author})</author>
${post.tags && post.tags.map((t) => `<category>${t}</category>`).join('')}
</item>
`
const generateRss = (posts, page = 'feed.xml') => `
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
<title>${escape(siteMetadata.title)}</title>
<link>${siteMetadata.siteUrl}/blog</link>
<description>${escape(siteMetadata.description)}</description>
<language>${siteMetadata.language}</language>
<managingEditor>${siteMetadata.email} (${siteMetadata.author})</managingEditor>
<webMaster>${siteMetadata.email} (${siteMetadata.author})</webMaster>
<lastBuildDate>${new Date(posts[0].date).toUTCString()}</lastBuildDate>
<atom:link href="${siteMetadata.siteUrl}/${page}" rel="self" type="application/rss+xml"/>
${posts.map(generateRssItem).join('')}
</channel>
</rss>
`
export default generateRss
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
lib/img-to-jsx.js | JavaScript | const visit = require('unist-util-visit')
const sizeOf = require('image-size')
const fs = require('fs')
module.exports = (options) => (tree) => {
visit(
tree,
// only visit p tags that contain an img element
(node) => node.type === 'paragraph' && node.children.some((n) => n.type === 'image'),
(node) => {
const imageNode = node.children.find((n) => n.type === 'image')
// only local files
if (fs.existsSync(`${process.cwd()}/public${imageNode.url}`)) {
const dimensions = sizeOf(`${process.cwd()}/public${imageNode.url}`)
// Convert original node to next/image
;(imageNode.type = 'mdxJsxFlowElement'),
(imageNode.name = 'Image'),
(imageNode.attributes = [
{ type: 'mdxJsxAttribute', name: 'alt', value: imageNode.alt },
{ type: 'mdxJsxAttribute', name: 'src', value: imageNode.url },
{ type: 'mdxJsxAttribute', name: 'width', value: dimensions.width },
{ type: 'mdxJsxAttribute', name: 'height', value: dimensions.height },
])
// Change node type from p to div to avoid nesting error
node.type = 'div'
node.children = [imageNode]
}
}
)
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
lib/mdx.js | JavaScript | import { bundleMDX } from 'mdx-bundler'
import fs from 'fs'
import matter from 'gray-matter'
import path from 'path'
import readingTime from 'reading-time'
import visit from 'unist-util-visit'
import codeTitles from './remark-code-title'
import remarkTocHeadings from './remark-toc-headings'
import imgToJsx from './img-to-jsx'
import getAllFilesRecursively from './utils/files'
const root = process.cwd()
const tokenClassNames = {
tag: 'text-code-red',
'attr-name': 'text-code-yellow',
'attr-value': 'text-code-green',
deleted: 'text-code-red',
inserted: 'text-code-green',
punctuation: 'text-code-white',
keyword: 'text-code-purple',
string: 'text-code-green',
function: 'text-code-blue',
boolean: 'text-code-red',
comment: 'text-gray-400 italic',
}
export function getFiles(type) {
const prefixPaths = path.join(root, 'data', type)
const files = getAllFilesRecursively(prefixPaths)
// Only want to return blog/path and ignore root, replace is needed to work on Windows
return files.map((file) => file.slice(prefixPaths.length + 1).replace(/\\/g, '/'))
}
export function formatSlug(slug) {
return slug.replace(/\.(mdx|md)/, '')
}
export function dateSortDesc(a, b) {
if (a > b) return -1
if (a < b) return 1
return 0
}
// Load one markdown/MDX document from `data/<type>/<slug>.(mdx|md)`, bundle it
// with mdx-bundler, and return the compiled source plus table of contents and
// normalized front matter. Prefers the `.mdx` file when both extensions exist.
export async function getFileBySlug(type, slug) {
  const mdxPath = path.join(root, 'data', type, `${slug}.mdx`)
  const mdPath = path.join(root, 'data', type, `${slug}.md`)
  const source = fs.existsSync(mdxPath)
    ? fs.readFileSync(mdxPath, 'utf8')
    : fs.readFileSync(mdPath, 'utf8')
  // Point mdx-bundler at the locally installed esbuild binary; without this
  // Next.js production builds fail to locate it.
  // https://github.com/kentcdodds/mdx-bundler#nextjs-esbuild-enoent
  if (process.platform === 'win32') {
    process.env.ESBUILD_BINARY_PATH = path.join(
      process.cwd(),
      'node_modules',
      'esbuild',
      'esbuild.exe'
    )
  } else {
    process.env.ESBUILD_BINARY_PATH = path.join(
      process.cwd(),
      'node_modules',
      'esbuild',
      'bin',
      'esbuild'
    )
  }
  // Populated by reference: remarkTocHeadings pushes one entry per heading
  // into this array while the bundle is compiled.
  let toc = []
  const { frontmatter, code } = await bundleMDX(source, {
    // mdx imports can be automatically source from the components directory
    cwd: path.join(process.cwd(), 'components'),
    xdmOptions(options) {
      // this is the recommended way to add custom remark/rehype plugins:
      // The syntax might look weird, but it protects you in case we add/remove
      // plugins in the future.
      options.remarkPlugins = [
        ...(options.remarkPlugins ?? []),
        require('remark-slug'),
        require('remark-autolink-headings'),
        [remarkTocHeadings, { exportRef: toc }],
        require('remark-gfm'),
        codeTitles,
        [require('remark-footnotes'), { inlineNotes: true }],
        require('remark-math'),
        imgToJsx,
      ]
      options.rehypePlugins = [
        ...(options.rehypePlugins ?? []),
        require('rehype-katex'),
        [require('rehype-prism-plus'), { ignoreMissing: true }],
        // Rewrite Prism's `token <type>` class pairs to the Tailwind classes
        // declared in tokenClassNames above.
        () => {
          return (tree) => {
            visit(tree, 'element', (node, index, parent) => {
              let [token, type] = node.properties.className || []
              if (token === 'token') {
                node.properties.className = [tokenClassNames[type]]
              }
            })
          }
        },
      ]
      return options
    },
    esbuildOptions: (options) => {
      // Allow JSX syntax inside plain .js component files.
      options.loader = {
        ...options.loader,
        '.js': 'jsx',
      }
      return options
    },
  })
  return {
    mdxSource: code,
    toc,
    frontMatter: {
      readingTime: readingTime(code),
      slug: slug || null,
      fileName: fs.existsSync(mdxPath) ? `${slug}.mdx` : `${slug}.md`,
      ...frontmatter,
      // Normalize the date to ISO 8601 so it serializes consistently.
      date: frontmatter.date ? new Date(frontmatter.date).toISOString() : null,
    },
  }
}
// Collect the front matter of every non-draft `.md`/`.mdx` file under
// `data/<folder>`, returned newest-first by date.
export async function getAllFilesFrontMatter(folder) {
  const base = path.join(root, 'data', folder)
  const frontMatters = []
  for (const filePath of getAllFilesRecursively(base)) {
    // Normalize Windows separators so slugs always use forward slashes.
    const relativeName = filePath.slice(base.length + 1).replace(/\\/g, '/')
    const extension = path.extname(relativeName)
    // Ignore anything that is not a markdown/MDX document.
    if (extension !== '.md' && extension !== '.mdx') continue
    const { data: frontmatter } = matter(fs.readFileSync(filePath, 'utf8'))
    if (frontmatter.draft === true) continue
    frontMatters.push({
      ...frontmatter,
      slug: formatSlug(relativeName),
      date: frontmatter.date ? new Date(frontmatter.date).toISOString() : null,
    })
  }
  return frontMatters.sort((a, b) => dateSortDesc(a.date, b.date))
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
lib/remark-code-title.js | JavaScript | import visit from 'unist-util-visit'
module.exports = function (options) {
return (tree) =>
visit(tree, 'code', (node, index) => {
const nodeLang = node.lang || ''
let language = ''
let title = ''
if (nodeLang.includes(':')) {
language = nodeLang.slice(0, nodeLang.search(':'))
title = nodeLang.slice(nodeLang.search(':') + 1, nodeLang.length)
}
if (!title) {
return
}
const className = 'remark-code-title'
const titleNode = {
type: 'mdxJsxFlowElement',
name: 'div',
attributes: [{ type: 'mdxJsxAttribute', name: 'className', value: className }],
children: [{ type: 'text', value: title }],
data: { _xdmExplicitJsx: true },
}
tree.children.splice(index, 0, titleNode)
node.lang = language
})
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
lib/remark-toc-headings.js | JavaScript | import visit from 'unist-util-visit'
// Remark plugin that collects every heading into the array passed as
// `options.exportRef`, used to build the post's table of contents.
module.exports = function (options) {
  return (tree) =>
    visit(tree, 'heading', (node, index, parent) => {
      options.exportRef.push({
        // NOTE(review): assumes remark-slug + remark-autolink-headings ran
        // first, so children[0] is the injected anchor link and children[1]
        // is the heading text; a heading without that structure would throw
        // here — confirm the plugin order in lib/mdx.js.
        value: node.children[1].value,
        url: node.children[0].url,
        depth: node.depth,
      })
    })
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
lib/tags.js | JavaScript | import fs from 'fs'
import matter from 'gray-matter'
import path from 'path'
import { getFiles } from './mdx'
import kebabCase from './utils/kebabCase'
const root = process.cwd()
// Count how many non-draft posts under `data/<type>` carry each tag.
// Returns a map of kebab-cased tag -> occurrence count.
export async function getAllTags(type) {
  const files = await getFiles(type)
  const counts = {}
  files.forEach((file) => {
    const raw = fs.readFileSync(path.join(root, 'data', type, file), 'utf8')
    const { data } = matter(raw)
    // Drafts and untagged posts do not contribute to the tag cloud.
    if (!data.tags || data.draft === true) return
    for (const tag of data.tags) {
      const key = kebabCase(tag)
      counts[key] = (counts[key] || 0) + 1
    }
  })
  return counts
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
lib/utils/files.js | JavaScript | import fs from 'fs'
import path from 'path'
// Compose unary functions left-to-right: pipe(f, g)(x) === g(f(x)).
const pipe = (...fns) => (x) => fns.reduce((v, f) => f(v), x)

// Flatten one level of nesting. Array.prototype.flat() is linear; the previous
// spread-in-reduce pattern copied the accumulator on every step (O(n^2)).
const flattenArray = (input) => input.flat()

const map = (fn) => (input) => input.map(fn)

// A file maps to its own path; a directory maps to the recursive list of
// files inside it.
const walkDir = (fullPath) => {
  return fs.statSync(fullPath).isFile() ? fullPath : getAllFilesRecursively(fullPath)
}

const pathJoinPrefix = (prefix) => (extraPath) => path.join(prefix, extraPath)

// Return the paths of every file below `folder`, walking directories
// depth-first.
const getAllFilesRecursively = (folder) =>
  pipe(fs.readdirSync, map(pipe(pathJoinPrefix(folder), walkDir)), flattenArray)(folder)

export default getAllFilesRecursively
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
lib/utils/formatDate.js | JavaScript | import siteMetadata from '@/data/siteMetadata'
// Format a date string for display (e.g. "January 1, 2021") using the locale
// configured in siteMetadata.
const formatDate = (date) => {
  return new Date(date).toLocaleDateString(siteMetadata.locale, {
    year: 'numeric',
    month: 'long',
    day: 'numeric',
  })
}

export default formatDate
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
lib/utils/htmlEscaper.js | JavaScript | const { replace } = ''
// escape
const es = /&(?:amp|#38|lt|#60|gt|#62|apos|#39|quot|#34);/g
const ca = /[&<>'"]/g
const esca = {
'&': '&',
'<': '<',
'>': '>',
"'": ''',
'"': '"',
}
const pe = (m) => esca[m]
/**
* Safely escape HTML entities such as `&`, `<`, `>`, `"`, and `'`.
* @param {string} es the input to safely escape
* @returns {string} the escaped input, and it **throws** an error if
* the input type is unexpected, except for boolean and numbers,
* converted as string.
*/
export const escape = (es) => replace.call(es, ca, pe)
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
lib/utils/kebabCase.js | JavaScript | const kebabCase = (str) =>
str &&
str
.match(/[A-Z]{2,}(?=[A-Z][a-z]+[0-9]*|\b)|[A-Z]?[a-z]+[0-9]*|[A-Z]|[0-9]+/g)
.map((x) => x.toLowerCase())
.join('-')
export default kebabCase
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
next.config.js | JavaScript | const withBundleAnalyzer = require('@next/bundle-analyzer')({
enabled: process.env.ANALYZE === 'true',
})
module.exports = withBundleAnalyzer({
reactStrictMode: true,
pageExtensions: ['js', 'jsx', 'md', 'mdx'],
eslint: {
dirs: ['pages', 'components', 'lib', 'layouts', 'scripts'],
},
webpack: (config, { dev, isServer }) => {
config.module.rules.push({
test: /\.(png|jpe?g|gif|mp4)$/i,
use: [
{
loader: 'file-loader',
options: {
publicPath: '/_next',
name: 'static/media/[name].[hash].[ext]',
},
},
],
})
config.module.rules.push({
test: /\.svg$/,
use: ['@svgr/webpack'],
})
if (!dev && !isServer) {
// Replace React with Preact only in client production build
Object.assign(config.resolve.alias, {
react: 'preact/compat',
'react-dom/test-utils': 'preact/test-utils',
'react-dom': 'preact/compat',
})
}
return config
},
})
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
pages/404.js | JavaScript | import Link from '@/components/Link'
export default function FourZeroFour() {
return (
<div className="flex flex-col items-start justify-start md:justify-center md:items-center md:flex-row md:space-x-6 md:mt-24">
<div className="pt-6 pb-8 space-x-2 md:space-y-5">
<h1 className="text-6xl font-extrabold leading-9 tracking-tight text-gray-900 dark:text-gray-100 md:text-8xl md:leading-14 md:border-r-2 md:px-6">
404
</h1>
</div>
<div className="max-w-md">
<p className="mb-4 text-xl font-bold leading-normal md:text-2xl">
Sorry we couldn't find this page.
</p>
<p className="mb-8">But dont worry, you can find plenty of other things on our homepage.</p>
<Link href="/">
<button className="inline px-4 py-2 text-sm font-medium leading-5 text-white transition-colors duration-150 bg-blue-600 border border-transparent rounded-lg shadow focus:outline-none focus:shadow-outline-blue hover:bg-blue-700 dark:hover:bg-blue-500">
Back to homepage
</button>
</Link>
</div>
</div>
)
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
pages/_app.js | JavaScript | import '@/css/tailwind.css'
import { ThemeProvider } from 'next-themes'
import Head from 'next/head'
import Analytics from '@/components/analytics'
import LayoutWrapper from '@/components/LayoutWrapper'
// Custom Next.js App: wires up the dark-mode ThemeProvider (class strategy),
// the responsive viewport meta tag, analytics, and the shared page chrome.
export default function App({ Component, pageProps }) {
  return (
    <ThemeProvider attribute="class">
      <Head>
        <meta content="width=device-width, initial-scale=1" name="viewport" />
      </Head>
      <Analytics />
      <LayoutWrapper>
        <Component {...pageProps} />
      </LayoutWrapper>
    </ThemeProvider>
  )
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
pages/_document.js | JavaScript | import Document, { Html, Head, Main, NextScript } from 'next/document'
// Custom Next.js Document: sets the <html> language and injects favicons,
// manifest, RSS link, web fonts, and the KaTeX stylesheet on every page.
class MyDocument extends Document {
  render() {
    return (
      <Html lang="en">
        <Head>
          <link rel="apple-touch-icon" sizes="76x76" href="/static/favicons/apple-touch-icon.png" />
          <link
            rel="icon"
            type="image/png"
            sizes="32x32"
            href="/static/favicons/favicon-32x32.png"
          />
          <link
            rel="icon"
            type="image/png"
            sizes="16x16"
            href="/static/favicons/favicon-16x16.png"
          />
          <link rel="manifest" href="/static/favicons/site.webmanifest" />
          <link rel="mask-icon" href="/static/favicons/safari-pinned-tab.svg" color="#5bbad5" />
          <meta name="msapplication-TileColor" content="#000000" />
          <meta name="theme-color" content="#000000" />
          <link rel="alternate" type="application/rss+xml" href="/feed.xml" />
          <link rel="preconnect" href="https://fonts.gstatic.com" crossOrigin="anonymous" />
          <link
            href="https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700&family=Caveat:wght@700&display=swap"
            rel="stylesheet"
          />
          {/* KaTeX CSS, needed by rehype-katex output; pinned with SRI hash. */}
          <link
            rel="stylesheet"
            href="https://cdn.jsdelivr.net/npm/katex@0.13.11/dist/katex.min.css"
            integrity="sha384-Um5gpz1odJg5Z4HAmzPtgZKdTBHZdw8S29IecapCSB31ligYPhHQZMIlWLYQGVoc"
            crossOrigin="anonymous"
          />
        </Head>
        <body className="antialiased text-black bg-white dark:bg-gray-900 dark:text-white">
          <Main />
          <NextScript />
        </body>
      </Html>
    )
  }
}

export default MyDocument
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
pages/about.js | JavaScript | import { MDXLayoutRenderer } from '@/components/MDXComponents'
import { getFileBySlug } from '@/lib/mdx'
// Layout used when the author document does not specify one in front matter.
const DEFAULT_LAYOUT = 'AuthorLayout'

// Build-time props: load and bundle the default author's MDX bio.
export async function getStaticProps() {
  // NOTE(review): the slug is passed as an array; getFileBySlug interpolates
  // it into a template string, so ['default'] coerces to "default" — confirm
  // this is intentional rather than passing the plain string.
  const authorDetails = await getFileBySlug('authors', ['default'])
  return { props: { authorDetails } }
}

// About page: renders the author MDX through the configured layout.
export default function About({ authorDetails }) {
  const { mdxSource, frontMatter } = authorDetails
  return (
    <MDXLayoutRenderer
      layout={frontMatter.layout || DEFAULT_LAYOUT}
      mdxSource={mdxSource}
      frontMatter={frontMatter}
    />
  )
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
pages/blog.js | JavaScript | import { getAllFilesFrontMatter } from '@/lib/mdx'
import siteMetadata from '@/data/siteMetadata'
import ListLayout from '@/layouts/ListLayout'
import { PageSeo } from '@/components/SEO'
// Number of posts shown per paginated blog index page; also imported by
// pages/blog/page/[page].js.
export const POSTS_PER_PAGE = 5

// Build-time props for /blog: all post front matter plus the first page slice
// and pagination metadata.
export async function getStaticProps() {
  const posts = await getAllFilesFrontMatter('blog')
  const initialDisplayPosts = posts.slice(0, POSTS_PER_PAGE)
  const pagination = {
    currentPage: 1,
    totalPages: Math.ceil(posts.length / POSTS_PER_PAGE),
  }
  return { props: { initialDisplayPosts, posts, pagination } }
}

// Blog index page: list of all posts with pagination controls.
export default function Blog({ posts, initialDisplayPosts, pagination }) {
  return (
    <>
      <PageSeo title={`Blog - ${siteMetadata.author}`} description={siteMetadata.description} />
      <ListLayout
        posts={posts}
        initialDisplayPosts={initialDisplayPosts}
        pagination={pagination}
        title="All Posts"
      />
    </>
  )
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
pages/blog/[...slug].js | JavaScript | import fs from 'fs'
import PageTitle from '@/components/PageTitle'
import generateRss from '@/lib/generate-rss'
import { MDXLayoutRenderer } from '@/components/MDXComponents'
import { formatSlug, getAllFilesFrontMatter, getFileBySlug, getFiles } from '@/lib/mdx'
// Layout used when a post's front matter does not declare one.
const DEFAULT_LAYOUT = 'PostLayout'

// Pre-render every blog post; nested file paths become multi-segment slugs.
export async function getStaticPaths() {
  const paths = getFiles('blog').map((post) => ({
    params: { slug: formatSlug(post).split('/') },
  }))
  return { paths, fallback: false }
}
// Build-time props for a single post: the compiled MDX, author details, and
// prev/next neighbors in date order. Also regenerates the site-wide RSS feed
// as a side effect of every post build.
export async function getStaticProps({ params }) {
  const allPosts = await getAllFilesFrontMatter('blog')
  const postIndex = allPosts.findIndex((post) => formatSlug(post.slug) === params.slug.join('/'))
  // allPosts is sorted newest-first, so index+1 is the older (previous) post.
  const prev = allPosts[postIndex + 1] || null
  const next = allPosts[postIndex - 1] || null
  const post = await getFileBySlug('blog', params.slug.join('/'))
  const authorList = post.frontMatter.authors || ['default']
  const authorPromise = authorList.map(async (author) => {
    const authorResults = await getFileBySlug('authors', [author])
    return authorResults.frontMatter
  })
  const authorDetails = await Promise.all(authorPromise)
  // rss
  const rss = generateRss(allPosts)
  fs.writeFileSync('./public/feed.xml', rss)
  return { props: { post, authorDetails, prev, next } }
}
// Single post page: renders the compiled MDX through its layout, or an
// "Under Construction" placeholder for drafts.
export default function Blog({ post, authorDetails, prev, next }) {
  const { mdxSource, toc, frontMatter } = post

  return (
    <>
      {frontMatter.draft !== true ? (
        <MDXLayoutRenderer
          layout={frontMatter.layout || DEFAULT_LAYOUT}
          toc={toc}
          mdxSource={mdxSource}
          frontMatter={frontMatter}
          authorDetails={authorDetails}
          prev={prev}
          next={next}
        />
      ) : (
        <div className="mt-24 text-center">
          <PageTitle>
            Under Construction{' '}
            <span role="img" aria-label="roadwork sign">
              🚧
            </span>
          </PageTitle>
        </div>
      )}
    </>
  )
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
pages/blog/page/[page].js | JavaScript | import { PageSeo } from '@/components/SEO'
import siteMetadata from '@/data/siteMetadata'
import { getAllFilesFrontMatter } from '@/lib/mdx'
import ListLayout from '@/layouts/ListLayout'
import { POSTS_PER_PAGE } from '../../blog'
// One static path per paginated blog index page: /blog/page/1 ... /blog/page/N.
export async function getStaticPaths() {
  const totalPosts = await getAllFilesFrontMatter('blog')
  const totalPages = Math.ceil(totalPosts.length / POSTS_PER_PAGE)
  const paths = []
  for (let pageNumber = 1; pageNumber <= totalPages; pageNumber += 1) {
    paths.push({ params: { page: pageNumber.toString() } })
  }
  return {
    paths,
    fallback: false,
  }
}
// Build-time props for one paginated blog index page: the slice of posts for
// that page plus pagination metadata.
export async function getStaticProps(context) {
  const {
    params: { page },
  } = context
  const posts = await getAllFilesFrontMatter('blog')
  // Explicit radix: never rely on parseInt's implicit base detection.
  const pageNumber = parseInt(page, 10)
  const initialDisplayPosts = posts.slice(
    POSTS_PER_PAGE * (pageNumber - 1),
    POSTS_PER_PAGE * pageNumber
  )
  const pagination = {
    currentPage: pageNumber,
    totalPages: Math.ceil(posts.length / POSTS_PER_PAGE),
  }

  return {
    props: {
      posts,
      initialDisplayPosts,
      pagination,
    },
  }
}
// Paginated blog index page (/blog/page/N): same list layout as /blog.
export default function PostPage({ posts, initialDisplayPosts, pagination }) {
  return (
    <>
      <PageSeo title={siteMetadata.title} description={siteMetadata.description} />
      <ListLayout
        posts={posts}
        initialDisplayPosts={initialDisplayPosts}
        pagination={pagination}
        title="All Posts"
      />
    </>
  )
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
pages/index.js | JavaScript | import Link from '@/components/Link'
import { PageSeo } from '@/components/SEO'
import siteMetadata from '@/data/siteMetadata'
import { getAllFilesFrontMatter } from '@/lib/mdx'
import formatDate from '@/lib/utils/formatDate'
// Number of recent posts shown on the homepage before the "All Posts" link.
const MAX_DISPLAY = 5

// Build-time props: front matter of all posts, newest-first.
export async function getStaticProps() {
  const posts = await getAllFilesFrontMatter('blog')
  return { props: { posts } }
}

// Homepage: site description, hero image, and the most recent posts.
export default function Home({ posts }) {
  return (
    <>
      <PageSeo title={siteMetadata.title} description={siteMetadata.description} />
      <div className="divide-y divide-gray-200 dark:divide-gray-700">
        <div className="pb-6 space-y-2 md:space-y-5 ">
          <p className="text-lg leading-7 text-gray-500 dark:text-gray-400">
            {siteMetadata.description}
          </p>
          <div className="w-full lg:h-96 md:h-56">
            <img className="object-cover rounded" src={siteMetadata.head} alt="head" />
          </div>
        </div>
        <ul className="divide-y divide-gray-200 dark:divide-gray-700">
          {!posts.length && 'No posts found.'}
          {posts.slice(0, MAX_DISPLAY).map((frontMatter) => {
            const { slug, date, title, summary } = frontMatter
            return (
              <li key={slug} className="py-12">
                <article>
                  <div className="space-y-2 xl:grid xl:grid-cols-4 xl:space-y-0 xl:items-baseline">
                    <dl>
                      <dt className="sr-only">Published on</dt>
                      <dd className="text-base font-medium leading-6 text-gray-500 dark:text-gray-400">
                        <time dateTime={date}>{formatDate(date)}</time>
                      </dd>
                    </dl>
                    <div className="space-y-5 xl:col-span-3">
                      <div className="space-y-6">
                        <div>
                          <h2 className="text-2xl font-bold leading-8 tracking-tight">
                            <Link
                              href={`/blog/${slug}`}
                              className="text-gray-900 dark:text-gray-100"
                            >
                              {title}
                            </Link>
                          </h2>
                        </div>
                        <div className="prose text-gray-500 max-w-none dark:text-gray-400">
                          {summary}
                        </div>
                      </div>
                      <div className="text-base font-medium leading-6">
                        <Link
                          href={`/blog/${slug}`}
                          className="text-primary-500 hover:text-primary-600 dark:hover:text-primary-400"
                          aria-label={`Read "${title}"`}
                        >
                          Read more &rarr;
                        </Link>
                      </div>
                    </div>
                  </div>
                </article>
              </li>
            )
          })}
        </ul>
      </div>
      {posts.length > MAX_DISPLAY && (
        <div className="flex justify-end text-base font-medium leading-6">
          <Link
            href="/blog"
            className="text-primary-500 hover:text-primary-600 dark:hover:text-primary-400"
            aria-label="all posts"
          >
            All Posts &rarr;
          </Link>
        </div>
      )}
    </>
  )
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
pages/projects.js | JavaScript | import siteMetadata from '@/data/siteMetadata'
import projectsData from '@/data/projectsData'
import Card from '@/components/Card'
import { PageSeo } from '@/components/SEO'
// Projects page: renders one Card per entry in data/projectsData.
export default function Projects() {
  return (
    <>
      <PageSeo title={`Projects - ${siteMetadata.author}`} description={siteMetadata.description} />
      <div className="divide-y divide-gray-200 dark:divide-gray-700">
        <div className="pt-6 pb-8 space-y-2 md:space-y-5">
          <h1 className="text-3xl font-extrabold leading-9 tracking-tight text-gray-900 dark:text-gray-100 sm:text-4xl sm:leading-10 md:text-6xl md:leading-14">
            Projects
          </h1>
        </div>
        <div className="container py-12">
          <div className="flex flex-wrap -m-4">
            {projectsData.map((d) => (
              <Card
                key={d.title}
                title={d.title}
                description={d.description}
                imgSrc={d.imgSrc}
                href={d.href}
              />
            ))}
          </div>
        </div>
      </div>
    </>
  )
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
pages/tags.js | JavaScript | import Link from '@/components/Link'
import { PageSeo } from '@/components/SEO'
import Tag from '@/components/Tag'
import siteMetadata from '@/data/siteMetadata'
import { getAllTags } from '@/lib/tags'
import kebabCase from '@/lib/utils/kebabCase'
// Build-time props: map of kebab-cased tag -> post count.
export async function getStaticProps() {
  const tags = await getAllTags('blog')
  return { props: { tags } }
}

// Tags page: all tags sorted by usage (most used first), each linking to its
// per-tag listing with the post count alongside.
export default function Tags({ tags }) {
  const sortedTags = Object.keys(tags).sort((a, b) => tags[b] - tags[a])
  return (
    <>
      <PageSeo title={`Tags - ${siteMetadata.author}`} description="Things I blog about" />
      <div className="flex flex-col items-start justify-start divide-y divide-gray-200 dark:divide-gray-700 md:justify-center md:items-center md:divide-y-0 md:flex-row md:space-x-6 md:mt-24">
        <div className="pt-6 pb-8 space-x-2 md:space-y-5">
          <h1 className="text-3xl font-extrabold leading-9 tracking-tight text-gray-900 dark:text-gray-100 sm:text-4xl sm:leading-10 md:text-6xl md:leading-14 md:border-r-2 md:px-6">
            Tags
          </h1>
        </div>
        <div className="flex flex-wrap max-w-lg">
          {Object.keys(tags).length === 0 && 'No tags found.'}
          {sortedTags.map((t) => {
            return (
              <div key={t} className="mt-2 mb-2 mr-5">
                <Tag text={t} />
                <Link
                  href={`/tags/${kebabCase(t)}`}
                  className="-ml-2 text-sm font-semibold text-gray-600 uppercase dark:text-gray-300"
                >
                  {` (${tags[t]})`}
                </Link>
              </div>
            )
          })}
        </div>
      </div>
    </>
  )
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
pages/tags/[tag].js | JavaScript | import { PageSeo } from '@/components/SEO'
import siteMetadata from '@/data/siteMetadata'
import ListLayout from '@/layouts/ListLayout'
import generateRss from '@/lib/generate-rss'
import { getAllFilesFrontMatter } from '@/lib/mdx'
import { getAllTags } from '@/lib/tags'
import kebabCase from '@/lib/utils/kebabCase'
import fs from 'fs'
import path from 'path'
// Project root, used to locate public/ for the per-tag RSS feeds.
const root = process.cwd()

// One static path per known tag (keys are already kebab-cased by getAllTags).
export async function getStaticPaths() {
  const tags = await getAllTags('blog')
  return {
    paths: Object.keys(tags).map((tag) => ({
      params: {
        tag,
      },
    })),
    fallback: false,
  }
}

// Build-time props: posts carrying this tag; also writes the per-tag RSS feed
// to public/tags/<tag>/feed.xml as a side effect.
export async function getStaticProps({ params }) {
  const allPosts = await getAllFilesFrontMatter('blog')
  const filteredPosts = allPosts.filter(
    (post) => post.draft !== true && post.tags.map((t) => kebabCase(t)).includes(params.tag)
  )

  // rss
  const rss = generateRss(filteredPosts, `tags/${params.tag}/feed.xml`)
  const rssPath = path.join(root, 'public', 'tags', params.tag)
  fs.mkdirSync(rssPath, { recursive: true })
  fs.writeFileSync(path.join(rssPath, 'feed.xml'), rss)

  return { props: { posts: filteredPosts, tag: params.tag } }
}

// Per-tag listing page.
export default function Tag({ posts, tag }) {
  // Capitalize first letter and convert space to dash
  const title = tag[0].toUpperCase() + tag.split(' ').join('-').slice(1)
  return (
    <>
      <PageSeo
        title={`${tag} - ${siteMetadata.title}`}
        description={`${tag} tags - ${siteMetadata.title}`}
      />
      <ListLayout posts={posts} title={title} />
    </>
  )
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
postcss.config.js | JavaScript | module.exports = {
plugins: {
tailwindcss: {},
autoprefixer: {},
},
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
prettier.config.js | JavaScript | module.exports = {
semi: false,
singleQuote: true,
printWidth: 100,
tabWidth: 2,
useTabs: false,
trailingComma: 'es5',
bracketSpacing: true,
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
scripts/compose.js | JavaScript | const fs = require('fs')
const path = require('path')
const inquirer = require('inquirer')
const dedent = require('dedent')
const root = process.cwd()

// Author slugs available for the prompt: file names (without extension)
// under data/authors.
const getAuthors = () => {
  const authorPath = path.join(root, 'data', 'authors')
  return fs.readdirSync(authorPath).map((filename) => path.parse(filename).name)
}
// Post layout choices for the prompt: files in layouts/ whose name contains
// "post" (case-insensitive), without extension.
const getLayouts = () => {
  const layoutPath = path.join(root, 'layouts')
  return fs
    .readdirSync(layoutPath)
    .map((filename) => path.parse(filename).name)
    .filter((name) => name.toLowerCase().includes('post'))
}
// Build the YAML front matter block for a new post from the prompt answers.
// NOTE(review): `summary` is emitted unquoted — a summary containing ":" or
// other YAML-significant characters would produce invalid front matter;
// consider quoting it.
const genFrontMatter = (answers) => {
  let d = new Date()
  // Today's date as zero-padded YYYY-MM-DD.
  const date = [
    d.getFullYear(),
    ('0' + (d.getMonth() + 1)).slice(-2),
    ('0' + d.getDate()).slice(-2),
  ].join('-')
  const tagArray = answers.tags.split(',')
  tagArray.forEach((tag, index) => (tagArray[index] = tag.trim()))
  // Single-quote each tag: "'a','b'".
  const tags = "'" + tagArray.join("','") + "'"
  const authorArray = answers.authors.length > 0 ? "'" + answers.authors.join("','") + "'" : ''
  let frontMatter = dedent`---
  title: ${answers.title ? answers.title : 'Untitled'}
  date: '${date}'
  tags: [${answers.tags ? tags : ''}]
  draft: ${answers.draft === 'yes' ? true : false}
  summary: ${answers.summary ? answers.summary : ' '}
  images: []
  layout: ${answers.layout}
  `
  // Only add the authors key when at least one author was selected.
  if (answers.authors.length > 0) {
    frontMatter = frontMatter + '\n' + `authors: [${authorArray}]`
  }
  frontMatter = frontMatter + '\n---'
  return frontMatter
}
// Interactive prompt that gathers post metadata, then writes a new post file
// under data/blog/. The 'wx' flag refuses to overwrite an existing file.
inquirer
  .prompt([
    {
      name: 'title',
      message: 'Enter post title:',
      type: 'input',
    },
    {
      name: 'extension',
      message: 'Choose post extension:',
      type: 'list',
      choices: ['mdx', 'md'],
    },
    {
      name: 'authors',
      message: 'Choose authors:',
      type: 'checkbox',
      choices: getAuthors,
    },
    {
      name: 'summary',
      message: 'Enter post summary:',
      type: 'input',
    },
    {
      name: 'draft',
      message: 'Set post as draft?',
      type: 'list',
      choices: ['yes', 'no'],
    },
    {
      name: 'tags',
      message: 'Any Tags? Separate them with , or leave empty if no tags.',
      type: 'input',
    },
    {
      name: 'layout',
      message: 'Select layout',
      type: 'list',
      choices: getLayouts,
    },
  ])
  .then((answers) => {
    // Remove special characters and replace space with -
    const fileName = answers.title
      .toLowerCase()
      .replace(/[^a-zA-Z0-9 ]/g, '')
      .replace(/ /g, '-')
      .replace(/-+/g, '-')
    const frontMatter = genFrontMatter(answers)
    const filePath = `data/blog/${fileName ? fileName : 'untitled'}.${
      answers.extension ? answers.extension : 'md'
    }`
    // 'wx': fail (rather than clobber) if a post with this name already exists.
    fs.writeFile(filePath, frontMatter, { flag: 'wx' }, (err) => {
      if (err) {
        throw err
      } else {
        console.log(`Blog post generated successfully at ${filePath}`)
      }
    })
  })
  .catch((error) => {
    if (error.isTtyError) {
      console.log("Prompt couldn't be rendered in the current environment")
    } else {
      console.log('Something went wrong, sorry!')
    }
  })
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
scripts/generate-sitemap.js | JavaScript | const fs = require('fs')
const globby = require('globby')
const prettier = require('prettier')
const siteMetadata = require('../data/siteMetadata')
// Build script: globs all pages, posts, and per-tag feeds, and writes a
// prettified sitemap.xml into public/.
;(async () => {
  const prettierConfig = await prettier.resolveConfig('./.prettierrc.js')
  const pages = await globby([
    'pages/*.js',
    'data/blog/**/*.mdx',
    'data/blog/**/*.md',
    'public/tags/**/*.xml',
    '!pages/_*.js',
    '!pages/api',
  ])
  const sitemap = `
        <?xml version="1.0" encoding="UTF-8"?>
        <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
            ${pages
              .map((page) => {
                // Map a source path to its public route (strip extensions and
                // the per-tag feed file name).
                const path = page
                  .replace('pages/', '/')
                  .replace('data/blog', '/blog')
                  .replace('public/', '/')
                  .replace('.js', '')
                  .replace('.mdx', '')
                  .replace('.md', '')
                  .replace('/feed.xml', '')
                const route = path === '/index' ? '' : path
                // Dynamic routes and the 404 page do not belong in the sitemap.
                if (page === `pages/404.js` || page === `pages/blog/[...slug].js`) {
                  return
                }
                return `
                        <url>
                            <loc>${siteMetadata.siteUrl}${route}</loc>
                        </url>
                    `
              })
              .join('')}
        </urlset>
    `
  const formatted = prettier.format(sitemap, {
    ...prettierConfig,
    parser: 'html',
  })
  // eslint-disable-next-line no-sync
  fs.writeFileSync('public/sitemap.xml', formatted)
})()
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
tailwind.config.js | JavaScript | const defaultTheme = require('tailwindcss/defaultTheme')
const colors = require('tailwindcss/colors')
module.exports = {
mode: 'jit',
purge: ['./pages/**/*.js', './components/**/*.js', './layouts/**/*.js', './lib/**/*.js'],
darkMode: 'class',
theme: {
extend: {
spacing: {
'9/16': '56.25%',
},
lineHeight: {
11: '2.75rem',
12: '3rem',
13: '3.25rem',
14: '3.5rem',
},
fontFamily: {
sans: ['Inter', ...defaultTheme.fontFamily.sans],
hand: ['Caveat', ...defaultTheme.fontFamily.sans],
},
colors: {
primary: colors.teal,
gray: colors.trueGray,
code: {
green: '#b5f4a5',
yellow: '#ffe484',
purple: '#d9a9ff',
red: '#ff8383',
blue: '#93ddfd',
white: '#fff',
},
},
typography: (theme) => ({
DEFAULT: {
css: {
color: theme('colors.gray.700'),
a: {
color: theme('colors.primary.500'),
'&:hover': {
color: theme('colors.primary.600'),
},
code: { color: theme('colors.primary.400') },
},
h1: {
fontWeight: '700',
letterSpacing: theme('letterSpacing.tight'),
color: theme('colors.gray.900'),
},
h2: {
fontWeight: '700',
letterSpacing: theme('letterSpacing.tight'),
color: theme('colors.gray.900'),
},
h3: {
fontWeight: '600',
color: theme('colors.gray.900'),
},
'h4,h5,h6': {
color: theme('colors.gray.900'),
},
code: {
color: theme('colors.pink.500'),
backgroundColor: theme('colors.gray.100'),
paddingLeft: '4px',
paddingRight: '4px',
paddingTop: '2px',
paddingBottom: '2px',
borderRadius: '0.25rem',
},
'code:before': {
content: 'none',
},
'code:after': {
content: 'none',
},
details: {
backgroundColor: theme('colors.gray.100'),
paddingLeft: '4px',
paddingRight: '4px',
paddingTop: '2px',
paddingBottom: '2px',
borderRadius: '0.25rem',
},
hr: { borderColor: theme('colors.gray.200') },
'ol li:before': {
fontWeight: '600',
color: theme('colors.gray.500'),
},
'ul li:before': {
backgroundColor: theme('colors.gray.500'),
},
strong: { color: theme('colors.gray.600') },
blockquote: {
color: theme('colors.gray.900'),
borderLeftColor: theme('colors.gray.200'),
},
},
},
dark: {
css: {
color: theme('colors.gray.300'),
a: {
color: theme('colors.primary.500'),
'&:hover': {
color: theme('colors.primary.400'),
},
code: { color: theme('colors.primary.400') },
},
h1: {
fontWeight: '700',
letterSpacing: theme('letterSpacing.tight'),
color: theme('colors.gray.100'),
},
h2: {
fontWeight: '700',
letterSpacing: theme('letterSpacing.tight'),
color: theme('colors.gray.100'),
},
h3: {
fontWeight: '600',
color: theme('colors.gray.100'),
},
'h4,h5,h6': {
color: theme('colors.gray.100'),
},
code: {
backgroundColor: theme('colors.gray.800'),
},
details: {
backgroundColor: theme('colors.gray.800'),
},
hr: { borderColor: theme('colors.gray.700') },
'ol li:before': {
fontWeight: '600',
color: theme('colors.gray.400'),
},
'ul li:before': {
backgroundColor: theme('colors.gray.400'),
},
strong: { color: theme('colors.gray.100') },
thead: {
color: theme('colors.gray.100'),
},
tbody: {
tr: {
borderBottomColor: theme('colors.gray.700'),
},
},
blockquote: {
color: theme('colors.gray.100'),
borderLeftColor: theme('colors.gray.700'),
},
},
},
}),
},
},
variants: {
typography: ['dark'],
},
plugins: [require('@tailwindcss/forms'), require('@tailwindcss/typography')],
}
| willemarcel/wille.blog | 1 | Personal blog | JavaScript | willemarcel | Wille Marcel | developmentseed |
project/lit_image_classifier.py | Python | from argparse import ArgumentParser
import os
import torch
import pytorch_lightning as pl
from pytorch_lightning.metrics import functional as PLF
from torch.nn import functional as F
from flash.vision import ImageClassificationData
from torchvision import transforms
from torchvision import models
import numpy as np
class LitClassifier(pl.LightningModule):
    """Image classifier: a torchvision backbone (1000-dim ImageNet head)
    followed by a two-layer linear classifier mapping to ``num_classes``.
    """

    def __init__(self, backbone='resnet50', num_classes=5, hidden_dim=1024, learning_rate=1e-3):
        super().__init__()
        # Stores backbone/num_classes/hidden_dim/learning_rate on self.hparams
        # and in checkpoints.
        self.save_hyperparameters()
        # Look the backbone up by name on torchvision.models, e.g. models.resnet50().
        self.backbone = getattr(models, backbone)()
        self.classifier = torch.nn.Sequential(
            torch.nn.Linear(1000, hidden_dim),
            torch.nn.Linear(hidden_dim, num_classes)
        )

    def _logits(self, x):
        # Shared forward pass used by every step: backbone features -> logits.
        return self.classifier(self.backbone(x))

    def forward(self, batch):
        # used only in .predict(); the batch is (images, labels), labels unused
        x, y = batch
        # log_softmax is monotonic, so this equals argmax of the raw logits.
        # dim=1 is passed explicitly: the implicit-dim form is deprecated and
        # emits a warning on every call.
        predicted_classes = F.log_softmax(self._logits(x), dim=1).argmax(dim=1)
        return predicted_classes

    def training_step(self, batch, batch_idx):
        x, y = batch
        loss = F.cross_entropy(self._logits(x), y)
        self.log('train_loss', loss, on_epoch=True)
        return loss

    def validation_step(self, batch, batch_idx):
        x, y = batch
        y_hat = self._logits(x)
        loss = F.cross_entropy(y_hat, y)
        acc = PLF.accuracy(F.log_softmax(y_hat, dim=1).argmax(dim=1), y)
        self.log('valid_loss', loss)
        self.log('valid_acc', acc)

    def test_step(self, batch, batch_idx):
        x, y = batch
        y_hat = self._logits(x)
        loss = F.cross_entropy(y_hat, y)
        acc = PLF.accuracy(F.log_softmax(y_hat, dim=1).argmax(dim=1), y)
        self.log('test_loss', loss)
        self.log('test_acc', acc)

    def configure_optimizers(self):
        # self.hparams available because we called self.save_hyperparameters()
        return torch.optim.Adam(self.parameters(), lr=self.hparams.learning_rate)

    @staticmethod
    def add_model_specific_args(parent_parser):
        """Attach this model's CLI flags to an existing parser."""
        parser = ArgumentParser(parents=[parent_parser], add_help=False)
        parser.add_argument('--learning_rate', type=float, default=0.0001)
        parser.add_argument('--backbone', type=str, default='resnet50')
        parser.add_argument('--batch_size', default=32, type=int)
        parser.add_argument('--num_classes', default=5, type=int)
        parser.add_argument('--hidden_dim', type=int, default=1024)
        return parser
def cli_main():
pl.seed_everything(1234)
# ------------
# args
# ------------
parser = ArgumentParser()
parser.add_argument('--data_dir', type=str, default='.')
# add trainer args (gpus=x, precision=...)
parser = pl.Trainer.add_argparse_args(parser)
# add model args (batch_size hidden_dim, etc...), anything defined in add_model_specific_args
parser = LitClassifier.add_model_specific_args(parser)
args = parser.parse_args()
# ------------
# data
# ------------
transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=[0.4913, 0.482, 0.446], std=[0.247, 0.243, 0.261])
])
# in real life you would have a separate validation split
datamodule = ImageClassificationData.from_folders(
train_folder=args.data_dir + '/train',
valid_folder=args.data_dir + '/test',
test_folder=args.data_dir + '/test',
batch_size=args.batch_size,
transform=transform
)
# ------------
# model
# ------------
model = LitClassifier(
backbone=args.backbone,
learning_rate=args.learning_rate,
hidden_dim=args.hidden_dim
)
# ------------
# training
# ------------
trainer = pl.Trainer.from_argparse_args(args, fast_dev_run=True)
trainer.fit(model, datamodule.train_dataloader(), datamodule.val_dataloader())
# ------------
# testing
# ------------
result = trainer.test(model, test_dataloaders=datamodule.test_dataloader())
print(result)
# predicting
preds = trainer.predict(model, datamodule.test_dataloader())
preds = list(np.stack(preds).flatten())
path = os.getcwd() + '/predictions.txt'
with open(path, 'w') as f:
preds = [str(x) for x in preds]
f.write('\n'.join(preds))
if __name__ == '__main__':
cli_main()
| williamFalcon/cifar5 | 5 | Python | williamFalcon | William Falcon | Lightning AI | |
setup.py | Python | #!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='project',
version='0.0.0',
description='Describe Your Cool Project',
author='',
author_email='',
# REPLACE WITH YOUR OWN GITHUB PROJECT LINK
url='https://github.com/PyTorchLightning/pytorch-lightning-conference-seed',
install_requires=['pytorch-lightning'],
packages=find_packages(),
)
| williamFalcon/cifar5 | 5 | Python | williamFalcon | William Falcon | Lightning AI | |
cifar5.py | Python | import os
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
from flash.image import ImageClassificationData, ImageClassifier
import argparse
from pytorch_lightning import seed_everything
from flash import Trainer
from pytorch_lightning.callbacks import LearningRateMonitor
from pytorch_lightning.loggers import TensorBoardLogger
seed_everything(7)
parser = argparse.ArgumentParser()
parser.add_argument('--gpus', type=int, default=0,
help='number of gpus to use for training')
parser.add_argument('--strategy', type=str, default='ddp',
help='strategy to use for training')
parser.add_argument('--batch_size', type=int, default=64,
help='batch size to use for training')
parser.add_argument('--epochs', type=int, default=5,
help='maximum number of epochs for training')
parser.add_argument('--data_dir', type=str, default='/datastores/cifar5',
help='the directory to load data from')
parser.add_argument('--learning_rate', type=float, default=1e-4,
help='the learning rate to use during model training')
parser.add_argument('--optimizer', type=str, default='Adam',
help='the optimizer to use during model training')
args = parser.parse_args()
transform = torchvision.transforms.Compose([
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize(mean=[0.4913, 0.482, 0.446], std=[0.247, 0.243, 0.261])
])
datamodule = ImageClassificationData.from_folders(
train_folder=args.data_dir + '/train',
val_folder=args.data_dir + '/test',
test_folder=args.data_dir + '/test',
batch_size=args.batch_size,
transform_kwargs={'mean': (0.4913, 0.482, 0.446), 'std': (0.247, 0.243, 0.261)}
)
# %%
model = ImageClassifier(backbone="resnet18", num_classes=datamodule.num_classes, learning_rate=args.learning_rate, optimizer=args.optimizer)
trainer = Trainer(
progress_bar_refresh_rate=10,
max_epochs=args.epochs,
gpus=args.gpus,
#logger=TensorBoardLogger("lightning_logs/", name="resnet"),
#callbacks=[LearningRateMonitor(logging_interval="step")],
)
if __name__ == '__main__':
trainer.fit(model, datamodule=datamodule)
#print('finished fitting')
trainer.test(model, datamodule=datamodule) | williamFalcon/cifar5-simple | 0 | Python | williamFalcon | William Falcon | Lightning AI | |
image_plotting_callback.py | Python | from matplotlib.pyplot import imshow, figure
import numpy as np
from torchvision.utils import make_grid
from pl_bolts.transforms.dataset_normalizations import cifar10_normalization
import pytorch_lightning as pl
import torch
class ImageSampler(pl.Callback):
def __init__(self):
super().__init__()
self.img_size = None
self.num_preds = 16
def on_train_epoch_end(self, trainer, pl_module, outputs):
figure(figsize=(8, 3), dpi=300)
# Z COMES FROM NORMAL(0, 1)
rand_v = torch.rand((self.num_preds, pl_module.hparams.latent_dim), device=pl_module.device)
p = torch.distributions.Normal(torch.zeros_like(rand_v), torch.zeros_like(rand_v))
z = p.rsample()
# SAMPLE IMAGES
with torch.no_grad():
pred = pl_module.decoder(z.to(pl_module.device)).cpu()
# UNDO DATA NORMALIZATION
normalize = cifar10_normalization()
mean, std = np.array(normalize.mean), np.array(normalize.std)
img = make_grid(pred).permute(1, 2, 0).numpy() * std + mean
# PLOT IMAGES
trainer.logger.experiment.add_image('img',torch.tensor(img).permute(2, 0, 1), global_step=trainer.global_step)
| williamFalcon/pytorch-lightning-vae | 211 | VAE for color images | Python | williamFalcon | William Falcon | Lightning AI |
vae.py | Python | import pytorch_lightning as pl
pl.seed_everything(1234)
from torch import nn
import torch
from pl_bolts.models.autoencoders.components import (
resnet18_decoder,
resnet18_encoder,
)
from pl_bolts.datamodules import CIFAR10DataModule, ImagenetDataModule
from image_plotting_callback import ImageSampler
from argparse import ArgumentParser
class VAE(pl.LightningModule):
def __init__(self, enc_out_dim=512, latent_dim=256, input_height=32):
super().__init__()
self.save_hyperparameters()
# encoder, decoder
self.encoder = resnet18_encoder(False, False)
self.decoder = resnet18_decoder(
latent_dim=latent_dim,
input_height=input_height,
first_conv=False,
maxpool1=False
)
# distribution parameters
self.fc_mu = nn.Linear(enc_out_dim, latent_dim)
self.fc_var = nn.Linear(enc_out_dim, latent_dim)
# for the gaussian likelihood
self.log_scale = nn.Parameter(torch.Tensor([0.0]))
def configure_optimizers(self):
return torch.optim.Adam(self.parameters(), lr=1e-4)
def gaussian_likelihood(self, x_hat, logscale, x):
scale = torch.exp(logscale)
mean = x_hat
dist = torch.distributions.Normal(mean, scale)
# measure prob of seeing image under p(x|z)
log_pxz = dist.log_prob(x)
return log_pxz.sum(dim=(1, 2, 3))
def kl_divergence(self, z, mu, std):
# --------------------------
# Monte carlo KL divergence
# --------------------------
# 1. define the first two probabilities (in this case Normal for both)
p = torch.distributions.Normal(torch.zeros_like(mu), torch.ones_like(std))
q = torch.distributions.Normal(mu, std)
# 2. get the probabilities from the equation
log_qzx = q.log_prob(z)
log_pz = p.log_prob(z)
# kl
kl = (log_qzx - log_pz)
kl = kl.sum(-1)
return kl
def training_step(self, batch, batch_idx):
x, _ = batch
# encode x to get the mu and variance parameters
x_encoded = self.encoder(x)
mu, log_var = self.fc_mu(x_encoded), self.fc_var(x_encoded)
# sample z from q
std = torch.exp(log_var / 2)
q = torch.distributions.Normal(mu, std)
z = q.rsample()
# decoded
x_hat = self.decoder(z)
# reconstruction loss
recon_loss = self.gaussian_likelihood(x_hat, self.log_scale, x)
# kl
kl = self.kl_divergence(z, mu, std)
# elbo
elbo = (kl - recon_loss)
elbo = elbo.mean()
self.log_dict({
'elbo': elbo,
'kl': kl.mean(),
'recon_loss': recon_loss.mean(),
'reconstruction': recon_loss.mean(),
'kl': kl.mean(),
})
return elbo
def train():
parser = ArgumentParser()
parser.add_argument('--gpus', type=int, default=None)
parser.add_argument('--dataset', type=str, default='cifar10')
args = parser.parse_args()
if args.dataset == 'cifar10':
dataset = CIFAR10DataModule('.')
if args.dataset == 'imagenet':
dataset = ImagenetDataModule('.')
sampler = ImageSampler()
vae = VAE()
trainer = pl.Trainer(gpus=args.gpus, max_epochs=20, callbacks=[sampler])
trainer.fit(vae, dataset)
if __name__ == '__main__':
train()
| williamFalcon/pytorch-lightning-vae | 211 | VAE for color images | Python | williamFalcon | William Falcon | Lightning AI |
babel.config.js | JavaScript | module.exports = {
presets: [
['@babel/preset-env', { targets: { node: 'current' } }],
'@babel/preset-typescript',
],
}
| willianjusten/kata-playground-ts | 22 | A simple playground to create and test your Katas in Typescript. | JavaScript | willianjusten | Willian Justen | |
generators/plopfile.js | JavaScript | module.exports = (plop) => {
plop.setGenerator('kata', {
description: 'Create a kata',
prompts: [
{
type: 'input',
name: 'name',
message: 'What is your kata exercise name? Without spaces and symbols.',
},
],
actions: [
{
type: 'add',
path: '../src/{{name}}/index.ts',
templateFile: 'templates/index.ts.hbs',
},
{
type: 'add',
path: '../src/{{name}}/README.md',
templateFile: 'templates/README.md.hbs',
},
{
type: 'add',
path: '../src/{{name}}/test.ts',
templateFile: 'templates/test.ts.hbs',
},
],
})
}
| willianjusten/kata-playground-ts | 22 | A simple playground to create and test your Katas in Typescript. | JavaScript | willianjusten | Willian Justen | |
jest.config.js | JavaScript | /*
* For a detailed explanation regarding each configuration property, visit:
* https://jestjs.io/docs/configuration
*/
module.exports = {
// All imported modules in your tests should be mocked automatically
// automock: false,
// Stop running tests after `n` failures
// bail: 0,
// The directory where Jest should store its cached dependency information
// cacheDirectory: "/tmp/jest_rs",
// Automatically clear mock calls and instances between every test
// clearMocks: false,
// Indicates whether the coverage information should be collected while executing the test
collectCoverage: true,
// An array of glob patterns indicating a set of files for which coverage information should be collected
// collectCoverageFrom: undefined,
// The directory where Jest should output its coverage files
coverageDirectory: "coverage",
// An array of regexp pattern strings used to skip coverage collection
// coveragePathIgnorePatterns: [
// "/node_modules/"
// ],
// Indicates which provider should be used to instrument code for coverage
// coverageProvider: "babel",
// A list of reporter names that Jest uses when writing coverage reports
// coverageReporters: [
// "json",
// "text",
// "lcov",
// "clover"
// ],
// An object that configures minimum threshold enforcement for coverage results
// coverageThreshold: undefined,
// A path to a custom dependency extractor
// dependencyExtractor: undefined,
// Make calling deprecated APIs throw helpful error messages
// errorOnDeprecated: false,
// Force coverage collection from ignored files using an array of glob patterns
// forceCoverageMatch: [],
// A path to a module which exports an async function that is triggered once before all test suites
// globalSetup: undefined,
// A path to a module which exports an async function that is triggered once after all test suites
// globalTeardown: undefined,
// A set of global variables that need to be available in all test environments
// globals: {},
// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
// maxWorkers: "50%",
// An array of directory names to be searched recursively up from the requiring module's location
// moduleDirectories: [
// "node_modules"
// ],
// An array of file extensions your modules use
// moduleFileExtensions: [
// "js",
// "jsx",
// "ts",
// "tsx",
// "json",
// "node"
// ],
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
// moduleNameMapper: {},
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
// modulePathIgnorePatterns: [],
// Activates notifications for test results
// notify: false,
// An enum that specifies notification mode. Requires { notify: true }
// notifyMode: "failure-change",
// A preset that is used as a base for Jest's configuration
// preset: undefined,
// Run tests from one or more projects
// projects: undefined,
// Use this configuration option to add custom reporters to Jest
// reporters: undefined,
// Automatically reset mock state between every test
// resetMocks: false,
// Reset the module registry before running each individual test
// resetModules: false,
// A path to a custom resolver
// resolver: undefined,
// Automatically restore mock state between every test
// restoreMocks: false,
// The root directory that Jest should scan for tests and modules within
// rootDir: undefined,
// A list of paths to directories that Jest should use to search for files in
// roots: [
// "<rootDir>"
// ],
// Allows you to use a custom runner instead of Jest's default test runner
// runner: "jest-runner",
// The paths to modules that run some code to configure or set up the testing environment before each test
// setupFiles: [],
// A list of paths to modules that run some code to configure or set up the testing framework before each test
// setupFilesAfterEnv: [],
// The number of seconds after which a test is considered as slow and reported as such in the results.
// slowTestThreshold: 5,
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
// snapshotSerializers: [],
// The test environment that will be used for testing
// testEnvironment: "jest-environment-node",
// Options that will be passed to the testEnvironment
// testEnvironmentOptions: {},
// Adds a location field to test results
// testLocationInResults: false,
// The glob patterns Jest uses to detect test files
// testMatch: [
// "**/__tests__/**/*.[jt]s?(x)",
// "**/?(*.)+(spec|test).[tj]s?(x)"
// ],
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
// testPathIgnorePatterns: [
// "/node_modules/"
// ],
// The regexp pattern or array of patterns that Jest uses to detect test files
// testRegex: [],
// This option allows the use of a custom results processor
// testResultsProcessor: undefined,
// This option allows use of a custom test runner
// testRunner: "jest-circus/runner",
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
// testURL: "http://localhost",
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
// timers: "real",
// A map from regular expressions to paths to transformers
// transform: undefined,
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
// transformIgnorePatterns: [
// "/node_modules/",
// "\\.pnp\\.[^\\/]+$"
// ],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,
// Indicates whether each individual test should be reported during the run
// verbose: undefined,
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
// watchPathIgnorePatterns: [],
// Whether to use watchman for file crawling
// watchman: true,
};
| willianjusten/kata-playground-ts | 22 | A simple playground to create and test your Katas in Typescript. | JavaScript | willianjusten | Willian Justen | |
src/01-square-digits/index.ts | TypeScript | export default function squareDigits(num: number) {
return +num
.toString()
.split('')
.map((n) => Math.pow(+n, 2))
.join('')
}
| willianjusten/kata-playground-ts | 22 | A simple playground to create and test your Katas in Typescript. | JavaScript | willianjusten | Willian Justen | |
src/01-square-digits/test.ts | TypeScript | import squareDigits from '.'
describe('squareDigits', () => {
it('should square digits and concatenate', () => {
expect(squareDigits(9119)).toBe(811181)
expect(squareDigits(0)).toBe(0)
})
})
| willianjusten/kata-playground-ts | 22 | A simple playground to create and test your Katas in Typescript. | JavaScript | willianjusten | Willian Justen | |
src/02-smallest-number/index.ts | TypeScript | export default function smallestNumber(numbers: number[]) {
return Math.min(...numbers)
}
| willianjusten/kata-playground-ts | 22 | A simple playground to create and test your Katas in Typescript. | JavaScript | willianjusten | Willian Justen | |
src/02-smallest-number/test.ts | TypeScript | import smallestNumber from '.'
describe('02-smallest-number', () => {
it('should return the smallest number', () => {
expect(smallestNumber([34, 15, 88, 2])).toBe(2)
expect(smallestNumber([34, -345, -1, 100])).toBe(-345)
})
})
| willianjusten/kata-playground-ts | 22 | A simple playground to create and test your Katas in Typescript. | JavaScript | willianjusten | Willian Justen | |
src/03-even-or-odd/index.ts | TypeScript | export default function evenOrOdd(n: number): string {
return n % 2 === 0 ? 'Even' : 'Odd'
}
| willianjusten/kata-playground-ts | 22 | A simple playground to create and test your Katas in Typescript. | JavaScript | willianjusten | Willian Justen |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.