Compare commits

...

2 Commits

Author | SHA1 | Message | Date

3c2d07c7d4 | style: remove draft property | 2025-03-12 18:46:05 +00:00
(Some checks failed: Deploy / build-and-test (push), failing after 1m16s)

308f4008b5 | feat: initial blog | 2025-03-12 18:45:23 +00:00
31 changed files with 1554 additions and 302 deletions


@ -0,0 +1,35 @@
name: Deploy
on:
  push:
    branches:
      - main
jobs:
  build-and-test:
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository code
        uses: actions/checkout@v4
        with:
          submodules: true
      - name: Check 🔍
        uses: zolacti/on@check
        with:
          drafts: true
      - name: Build 🛠
        uses: zolacti/on@build
      - name: Deploy 🚀
        uses: appleboy/scp-action@v0.1.7
        with:
          host: ${{ vars.ATLAS_SSH_HOST }}
          username: ${{ vars.ATLAS_SSH_USERNAME }}
          key: ${{ secrets.ATLAS_SSH_KEY }}
          port: ${{ vars.ATLAS_SSH_PORT }}
          source: public
          target: /www/blog
          rm: true
          overwrite: true
          strip_components: 1


@ -69,7 +69,7 @@ function has_body_changes() {
# Function to update the social media card for a post or section.
function generate_and_commit_card {
    local file=$1
-    social_media_card=$(social-cards-zola -o static/img/social_cards -b http://127.0.0.1:1111 -u -p -i "$file") || {
+    social_media_card=$(./static/code/social-cards-zola -o static/img/social_cards -b http://127.0.0.1:1111 -u -p -i "$file") || {
        echo "Failed to update social media card for $file"
        exit 1
    }
@ -165,18 +165,17 @@ for file in "${all_changed_files[@]}"; do
        continue
    fi

-    # If the file is an .md file and it's a draft, abort the commit.
-    if [[ "$file" == *.md ]]; then
-        if is_draft "$file"; then
-            error_exit "Draft file $file is being committed!"
-        fi
-    fi
+    # # If the file is an .md file and it's a draft, abort the commit.
+    # if [[ "$file" == *.md ]]; then
+    #     if is_draft "$file"; then
+    #         error_exit "Draft file $file is being committed!"
+    #     fi
+    # fi
done

# Use `social-cards-zola` to create/update the social media card for Markdown files.
# See https://osc.garden/blog/automating-social-media-cards-zola/ for context.
changed_md_files=$(echo "$all_changed_files" | grep '\.md$')
-echo "Changed Markdown files: $changed_md_files"
# Use parallel to create the social media cards in parallel and commit them.
if [[ -n "$changed_md_files" ]]; then
    echo "$changed_md_files" | parallel -j 8 generate_and_commit_card


@ -1,228 +0,0 @@
#!/usr/bin/env bash
set -eo pipefail

# This script takes a markdown post, crafts the corresponding URL, checks if it's accessible,
# takes a screenshot, and saves it to a specified location.
# It can update the front matter of the post with the path to the generated image (-u | --update-front-matter option).
# It's meant to be used as a pre-commit hook to generate social media cards for Zola sites using the tabi theme.
# More details: https://osc.garden/blog/automating-social-media-cards-zola/

function help_function(){
    echo "This script automates the creation of social media cards for Zola websites."
    echo "It takes a Markdown post and saves its live screenshot to a specified location."
    echo ""
    echo "IMPORTANT! It needs to be run from the root of the Zola site."
    echo ""
    echo "Usage: social-cards-zola [OPTIONS]"
    echo ""
    echo "Options:"
    echo "  -h, --help                 Show this help message and exit."
    echo "  -b, --base_url URL         The base URL where the Zola site is hosted. Default is http://127.0.0.1:1111."
    echo "  -i, --input INPUT_PATH     The relative path to the markdown file of the post/section you want to capture. Should be in the format 'content/blog/post_name.language.md'."
    echo "  -k, --key KEY              The front matter key to update. Default is 'social_media_card'."
    echo "  -o, --output_path PATH     The directory where the generated image will be saved."
    echo "  -p, --print_output         Print the path to the resulting screenshot at the end."
    echo "  -u, --update-front-matter  Update or add the 'social_media_card' key in the front matter of the Markdown file."
    echo
    echo "Examples:"
    echo "  social-cards-zola --base_url https://example.com --input content/blog/my_post.md --output_path static/img/social_cards"
    echo "  social-cards-zola -u -b http://127.0.0.1:1025 -i content/archive/_index.es.md -o static/img"
    exit 0
}

function convert_filename_to_url() {
    # Remove .md extension.
    local post_name="${1%.md}"

    # Remove "content/" prefix.
    local url="${post_name#content/}"

    # Extract language code.
    local lang_code="${url##*.}"
    if [[ "$lang_code" == "$url" ]]; then
        lang_code=""  # No language code.
    else
        lang_code="${lang_code}/"  # Add trailing slash.
        url="${url%.*}"  # Remove the language code from the URL.
    fi

    # Handle co-located index.md by stripping it and using the directory as the URL.
    if [[ "$url" == */index ]]; then
        url="${url%/*}"  # Remove the /index suffix.
    fi

    # Remove "_index" suffix.
    if [[ "$url" == *"_index"* ]]; then
        url="${url%%_index*}"
    fi

    # Return the final URL with a single trailing slash.
    full_url="${lang_code}${url}"
    echo "${full_url%/}/"
}

function error_exit() {
    echo "ERROR: $1" >&2
    exit "${2:-1}"
}

function validate_input_params() {
    missing_params=()

    if [[ -z "$base_url" ]]; then
        missing_params+=("base_url")
    fi

    if [[ -z "$input" ]]; then
        missing_params+=("input")
    fi

    if [[ -z "$output_path" ]]; then
        missing_params+=("output_path")
    fi

    if [ ${#missing_params[@]} -ne 0 ]; then
        error_exit "The following required settings are missing: ${missing_params[*]}. Use -h or --help for usage."
    fi
}

function check_dependencies() {
    for cmd in "curl" "shot-scraper"; do
        if ! command -v $cmd &> /dev/null; then
            error_exit "$cmd could not be found. Please install it."
        fi
    done
}

function fetch_status() {
    local retry_count=0
    local max_retries=5
    local status

    while [[ $retry_count -lt $max_retries ]]; do
        status=$(curl -s -o /dev/null -I -w "%{http_code}" "${base_url}${post_url}")
        if [[ "$status" -eq "200" ]]; then
            return
        fi
        retry_count=$((retry_count + 1))
        sleep 2
    done

    error_exit "Post $input is not accessible. Max retries ($max_retries) reached."
}

function capture_screenshot() {
    temp_file=$(mktemp /tmp/social-zola.XXXXXX)
    trap 'rm -f "$temp_file"' EXIT
    shot-scraper --silent "${base_url}/${post_url}" -w 700 -h 400 --retina --quality 60 -o "$temp_file"
}

function move_file() {
    local safe_filename=$(echo "${post_url%/}" | sed 's/[^a-zA-Z0-9]/_/g')

    # Create the output directory if it doesn't exist.
    mkdir -p "$output_path"

    image_filename="${output_path}/${safe_filename:-index}.jpg"  # If the filename is empty, use "index".
    mv "$temp_file" "$image_filename" || error_exit "Failed to move the file to $image_filename"
}

function update_front_matter {
    local md_file_path="$1"
    local image_output="${2#static/}"

    # Temporary file for awk processing
    temp_awk=$(mktemp /tmp/frontmatter.XXXXXX)

    awk -v card_path="$image_output" '
    # Initialize flags for tracking state.
    BEGIN { in_extra=done=front_matter=extra_exists=0; }

    # Function to insert the social_media_card path.
    function insert_card() { print "social_media_card = \"" card_path "\""; done=1; }

    {
        # If card has been inserted, simply output remaining lines.
        if (done) { print; next; }

        # Toggle front_matter flag at its start, denoted by +++
        if (/^\+\+\+/ && front_matter == 0) {
            front_matter = 1;
            print "+++";
            next;
        }

        # Detect [extra] section and set extra_exists flag.
        if (/^\[extra\]/) { in_extra=1; extra_exists=1; print; next; }

        # Update existing social_media_card.
        if (in_extra && /^social_media_card =/) { insert_card(); in_extra=0; next; }

        # End of front matter or start of new section.
        if (in_extra && (/^\[[a-zA-Z_-]+\]/ || (/^\+\+\+/ && front_matter == 1))) {
            insert_card();  # Add the missing social_media_card.
            in_extra=0;
        }

        # Insert missing [extra] section.
        if (/^\+\+\+/ && front_matter == 1 && in_extra == 0 && extra_exists == 0) {
            print "\n[extra]";
            insert_card();
            in_extra=0;
            front_matter = 0;
            print "+++";
            next;
        }

        # Print all other lines as-is.
        print;
    }' "$md_file_path" > "$temp_awk"

    # Move the temporary file back to the original markdown file.
    mv "$temp_awk" "$md_file_path"
}

function main() {
    while [[ "$#" -gt 0 ]]; do
        case "$1" in
            -h|--help)
                help_function;;
            -b|--base_url)
                base_url="$2"
                shift 2;;
            -i|--input)
                input="$2"
                shift 2;;
            -o|--output_path)
                output_path="$2"
                shift 2;;
            -k|--key)
                front_matter_key="$2"
                shift 2;;
            -u|--update-front-matter)
                update="true"
                shift 1;;
            -p|--print_output)
                print_output="true"
                shift 1;;
            *)
                error_exit "Unknown option: $1";;
        esac
    done

    validate_input_params
    check_dependencies

    : "${base_url:="http://127.0.0.1:1111"}"
    : "${front_matter_key:="social_media_card"}"
    base_url="${base_url%/}/"  # Ensure one trailing slash.
    post_url="$(convert_filename_to_url "$input")"

    fetch_status
    capture_screenshot
    move_file

    if [[ "$update" == "true" ]]; then
        update_front_matter "$input" "$image_filename"
    fi

    if [[ "$print_output" == "true" ]]; then
        echo "$image_filename"
    fi
}

main "$@"

.gitignore vendored Normal file

@ -0,0 +1,2 @@
public
.idea

.idea/vcs.xml generated

@ -1,5 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
+  <component name="CommitMessageInspectionProfile">
+    <profile version="1.0">
+      <inspection_tool class="CommitFormat" enabled="true" level="WARNING" enabled_by_default="true" />
+      <inspection_tool class="CommitNamingConvention" enabled="true" level="WARNING" enabled_by_default="true" />
+    </profile>
+  </component>
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
    <mapping directory="$PROJECT_DIR$/themes/tabi" vcs="Git" />

README.md Normal file

@ -0,0 +1,5 @@
# My Blog
Welcome to my blog! Here I write about things I find interesting, like programming, science, and philosophy.
It's available at [aldofunes.com](https://www.aldofunes.com).


@ -1,39 +1,563 @@
-# The URL the site will be built for
# The base URL of the site; the only required configuration variable.
base_url = "https://www.aldofunes.com"
-default_language = "en"
-build_search_index = true
-theme = "tabi"
-taxonomies = [{name = "tags", feed = true}]

# The site title and description; used in feeds by default.
title = "Aldo Funes"
description = "Welcome to my personal blog!"

# The default language; used in feeds.
default_language = "en"

# The site theme to use.
theme = "tabi"
# For overriding the default output directory `public`, set it to another value (e.g.: "docs")
output_dir = "public"
# Whether dotfiles at the root level of the output directory are preserved when (re)building the site.
# Enabling this also prevents the deletion of the output folder itself on rebuilds.
preserve_dotfiles_in_output = false
# When set to "true", the Sass files in the `sass` directory in the site root are compiled.
# Sass files in theme directories are always compiled.
compile_sass = false
# When set to "true", the generated HTML files are minified.
minify_html = true
# A list of glob patterns specifying asset files to ignore when the content
# directory is processed. Defaults to none, which means that all asset files are
# copied over to the `public` directory.
# Example:
# ignored_content = ["*.{graphml,xlsx}", "temp.*", "**/build_folder"]
ignored_content = []
# Similar to ignored_content, a list of glob patterns specifying asset files to
# ignore when the static directory is processed. Defaults to none, which means
# that all asset files are copied over to the `public` directory
ignored_static = []
# When set to "true", a feed is automatically generated.
generate_feeds = true
# When set to "all", paginated pages are not a part of the sitemap, default is "none"
exclude_paginated_pages_in_sitemap = "none"
# The filenames to use for the feeds. Used as the template filenames, too.
# Defaults to ["atom.xml"], which has a built-in template that renders an Atom 1.0 feed.
# There is also a built-in template "rss.xml" that renders an RSS 2.0 feed.
feed_filenames = ["atom.xml"]
# The number of articles to include in the feed. All items are included if
# this limit is not set (the default).
# feed_limit = 20
# When set to "true", files in the `static` directory are hard-linked. Useful for large
# static files. Note that for this to work, both `static` and the
# output directory need to be on the same filesystem. Note that the theme's `static`
# files are always copied, regardless of this setting.
hard_link_static = false
# The default author for pages
author = "Aldo Funes"
# The taxonomies to be rendered for the site and their configuration of the default languages
# Example:
# taxonomies = [
# {name = "tags", feed = true}, # each tag will have its own feed
# {name = "tags"}, # you can have taxonomies with the same name in multiple languages
# {name = "categories", paginate_by = 5}, # 5 items per page for a term
# {name = "authors"}, # Basic definition: no feed or pagination
# ]
#
taxonomies = [{ name = "tags" }]
# When set to "true", a search index is built from the pages and section
# content for `default_language`.
build_search_index = true
# When set to "false", Sitemap.xml is not generated
generate_sitemap = true
# When set to "false", robots.txt is not generated
generate_robots_txt = true
# Configuration of the Markdown rendering
[markdown]
# When set to "true", all code blocks are highlighted.
highlight_code = true
# When set to "true", missing highlight languages are treated as errors. Defaults to false.
error_on_missing_highlight = false
# A list of directories used to search for additional `.sublime-syntax` and `.tmTheme` files.
extra_syntaxes_and_themes = []
# To use a Zola built-in theme, CSP needs to allow unsafe-inline for style-src.
highlight_theme = "css"
highlight_themes_css = [{ theme = "dracula", filename = "css/syntax.css" }]
# When set to "true", emoji aliases translated to their corresponding
# Unicode emoji equivalent in the rendered Markdown files. (e.g.: :smile: => 😄)
render_emoji = false
# CSS class to add to external links (e.g. "external-link")
external_links_class = "external"
# Whether external links are to be opened in a new tab
# If this is true, a `rel="noopener"` will always automatically be added for security reasons
external_links_target_blank = true
# Whether to set rel="nofollow" for all external links
external_links_no_follow = false
# Whether to set rel="noreferrer" for all external links
external_links_no_referrer = false
# Whether smart punctuation is enabled (changing quotes, dashes, dots in their typographic form)
# For example, `...` into `…`, `"quote"` into `“curly”` etc
smart_punctuation = true
# Whether parsing of definition lists is enabled
definition_list = false
# Whether to set decoding="async" and loading="lazy" for all images
# When turned on, the alt text must be plain text.
# For example, `![xx](...)` is ok but `![*x*x](...)` isn't ok
lazy_async_image = false
# Whether footnotes are rendered in the GitHub-style (at the bottom, with back references) or plain (in the place, where they are defined)
bottom_footnotes = true
# This determines whether to insert a link for each header like the ones you can see on this site if you hover over
# a header.
# The default template can be overridden by creating an `anchor-link.html` file in the `templates` directory.
# This value can be "left", "right", "heading" or "none".
# "heading" means the full heading becomes the text of the anchor.
# See "Internal links & deep linking" in the documentation for more information.
insert_anchor_links = "none"
# Configuration of the link checker.
[link_checker]
# Skip link checking for external URLs that start with these prefixes
skip_prefixes = [
"http://[2001:db8::]/",
]
# Skip anchor checking for external URLs that start with these prefixes
skip_anchor_prefixes = [
"https://caniuse.com/",
]
# Treat internal link problems as either "error" or "warn", default is "error"
internal_level = "error"
# Treat external link problems as either "error" or "warn", default is "error"
external_level = "error"
# Various slugification strategies, see below for details
# Defaults to everything being a slug
[slugify]
paths = "on"
taxonomies = "on"
anchors = "on"
# Whether to remove date prefixes for page path slugs.
# For example, content/posts/2016-10-08_a-post-with-dates.md => posts/a-post-with-dates
# When true, content/posts/2016-10-08_a-post-with-dates.md => posts/2016-10-08-a-post-with-dates
paths_keep_dates = false
[search]
# Whether to include the title of the page/section in the index
include_title = true
# Whether to include the description of the page/section in the index
include_description = false
# Whether to include the RFC3339 datetime of the page in the search index
include_date = false
# Whether to include the path of the page/section in the index (the permalink is always included)
include_path = false
# Whether to include the rendered content of the page/section in the index
include_content = true
# At which code point to truncate the content to. Useful if you have a lot of pages and the index would
# become too big to load on the site. Defaults to not being set.
# truncate_content_length = 100
# Whether to produce the search index as a javascript file or as a JSON file
# Accepted values:
# - "elasticlunr_javascript", "elasticlunr_json"
# - "fuse_javascript", "fuse_json"
index_format = "elasticlunr_json"
# Optional translation object for the default language
# Example:
# default_language = "fr"
#
# [translations]
# title = "Un titre"
#
[translations]
# Additional languages definition
# You can define language specific config values and translations:
# title, description, generate_feeds, feed_filenames, taxonomies, build_search_index
# as well as its own search configuration and translations (see above for details on those)
[languages]
# For example
# [languages.fr]
# title = "Mon blog"
# generate_feeds = true
# taxonomies = [
# {name = "auteurs"},
# {name = "tags"},
# ]
# build_search_index = false
[languages.es]
title = "Aldo Funes"
-taxonomies = [{name = "tags", feed = true}]
generate_feeds = true
taxonomies = [{ name = "tags" }]
build_search_index = true

-[languages.pt-PT]
-title = "Aldo Funes"
-taxonomies = [{name = "tags", feed = true}]

-[markdown]
-# Whether to do syntax highlighting
-# Theme can be customised by setting the `highlight_theme` variable to a theme supported by Zola
-highlight_code = true
-#highlight_theme = "dracula"
-highlight_theme = "css"
-highlight_themes_css = [
-    { theme = "dracula", filename = "css/syntax.css" },
-]
-external_links_class = "external"

#[languages.pt-PT]
#title = "Aldo Funes"
#generate_feeds = true
#taxonomies = [{ name = "tags" }]
#build_search_index = true

[extra]
-# Put all your custom variables here
# Check out the documentation (or the comments below) to learn how to customise tabi:
# https://welpo.github.io/tabi/blog/mastering-tabi-settings/
# Use sans-serif font everywhere.
# By default, the serif font is only used in articles.
override_serif_with_sans = false
# Enable JavaScript theme toggler to allow users to switch between dark/light mode.
# If disabled, your site will use the theme specified in the `default_theme` variable.
theme_switcher = true
# This setting determines the default theme on load ("light" or "dark").
# To follow the user's OS theme, leave it empty or unset.
default_theme = ""
# Choose the colourscheme (skin) for the theme. Default is "teal".
# Skin available: blue, lavender, mint, red, sakura, teal, monochrome, lowcontrast_orange, lowcontrast_peach, lowcontrast_pink, indigo_ingot, evangelion
# See them live and learn how to create your own: https://welpo.github.io/tabi/blog/customise-tabi/#skins
# WARNING! "lowcontrast" skins, while aesthetically pleasing, may not provide optimal
# contrast (in light theme) for readability and might not be suitable for all users.
# Furthermore, low contrasting elements will affect your Google Lighthouse rating.
# All other skins have optimal contrast.
skin = ""
# Set browser theme colour. Can be a single colour or [light, dark].
# Note: Bright colors may be ignored in dark mode.
# More details: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/meta/name/theme-color
# browser_theme_color = "#087e96" # Example of single value.
# browser_theme_color = ["#ffffff", "#000000"] # Example of light/dark colours.
# List additional stylesheets to load site-wide.
# These stylesheets should be located in your site's `static` directory.
# Example: stylesheets = ["extra1.css", "path/extra2.css"]
# You can load a stylesheet for a single post by adding it to the [extra] section of the post's front matter, following this same format.
stylesheets = ["css/syntax.css"]
-remote_repository_url = "https://github.com/aldofunes/blog"
-remote_repository_git_platform = "auto"
-remote_repository_branch = "main"
-show_remote_changes = true
-show_remote_source = true
-favicon_emoji = "👾"

# Sets the default canonical URL for all pages.
# Individual pages can override this in the [extra] section using canonical_url.
# Example: "$base_url/blog/post1" will get the canonical URL "https://example.com/blog/post1".
# Note: To ensure accuracy in terms of matching content, consider setting 'canonical_url' individually per page.
# base_canonical_url = "https://example.com"

# Remote repository for your Zola site.
# Used for `show_remote_changes` and `show_remote_source` (see below).
# Supports GitHub, GitLab, Gitea, and Codeberg.
remote_repository_url = "https://gitea.funes.me/aldo/blog"
# Set this to "auto" to try and auto-detect the platform based on the repository URL.
# Accepted values are "github", "gitlab", "gitea", and "codeberg".
remote_repository_git_platform = "gitea" # Defaults to "auto".
# Branch in the repo hosting the Zola site.
remote_repository_branch = "main" # Defaults to "main".
# Show a link to the commit history of updated posts, right next to the last updated date.
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
show_remote_changes = true # Defaults to true.
# Show a link to the repository of the site, right next to the "Powered by Zola & tabi" text.
show_remote_source = true # Defaults to true.
# Add a "copy" button to codeblocks (loads ~700 bytes of JavaScript).
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
copy_button = true
# Make code block names clickable if they are URLs (loads ~400 bytes of JavaScript).
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
code_block_name_links = false
# Force left-to-right (LTR) direction for code blocks.
# Set to false to allow code to follow the document's natural direction.
# Can be set at page or section levels. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
force_codeblock_ltr = true
# Show the author(s) of a page.
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
show_author = false
# Show the reading time of a page.
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
show_reading_time = true
# Show the date of a page below its title.
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
show_date = true
# Determines how dates are displayed in the post listing (e.g. front page or /blog). Options:
# "date" - Show only the original date of the post (default if unset).
# "updated" - Show only the last updated date of the post. If there is no last updated date, it shows the original date.
# "both" - Show both the original date and the last updated date.
post_listing_date = "date"
# Show "Jump to posts" link next to series' title.
# By default, the link appears automatically when a series description exceeds 2000 characters.
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
# show_jump_to_posts = true
# Determines if indexes should be increasing (false) or decreasing (true) in series' posts list.
# It has only effect if the section uses indexes metadata (which is only the case for series as of now).
# Can be set at section levels, following the hierarchy: section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
post_listing_index_reversed = false # Defaults to false.
# Enable KaTeX for all posts.
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
katex = false
# Enable Mermaid diagrams for all posts.
# Loads ~2.5MB of JavaScript.
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
mermaid = true
# Serve Mermaid JavaScript locally. Version bundled with tabi.
# If set to false, it will load the latest version from JSDelivr.
# Only relevant when `mermaid = true`.
serve_local_mermaid = true
# Show links to previous and next articles at the bottom of posts.
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
show_previous_next_article_links = true
# Invert order of the links to previous and next articles at the bottom of posts.
# By default, next articles are on the left side of the page and previous articles are on the right side.
# To reverse the order (next articles on the right and previous articles on the left), set it to true.
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
invert_previous_next_article_links = false
# Whether the navigation for previous/next article should match the full width of the site (same as the navigation bar at the top) or the article width.
# To match the navigation bar at the top, set it to true.
previous_next_article_links_full_width = true
# Quick navigation buttons.
# Adds "go up" and "go to comments" buttons on the bottom right (hidden for mobile).
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
quick_navigation_buttons = true
# Add a Table of Contents to posts, right below the title and metadata.
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
toc = true
# Date format used when listing posts (main page, /blog section, tag posts list…)
# Default is "6th July 2049" in English and "%d %B %Y" in other languages.
# long_date_format = "%d %B %Y"
# Date format used for blog posts.
# Default is "6th July 2049" in English and "%-d %B %Y" in other languages.
short_date_format = ""
# Custom separator used in title tag and posts metadata (between date, time to read, and tags).
separator = "•"
# Use a shorter layout for All tags listing.
# Default: tag_name n post[s]
# Compact: tag_name^n (superscript number)
compact_tags = false
# How tags are sorted in a Tags listing based on templates/tags/list.html.
# "name" for alphabetical, "frequency" for descending count of posts.
# Default: "name".
tag_sorting = "name"
# Show clickable tags above cards.html template (e.g. projects/) to filter the displayed items.
# Loads JS to filter. If JS is disabled, the buttons are links to the tag's page.
# Can be set at the section or config.toml level, following the hierarchy: section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
# Default: true
enable_cards_tag_filtering = true
# Invert the order of the site title and page title in the browser tab.
# Example: true => "Blog • ~/tabi", false => "~/tabi • Blog"
invert_title_order = false
# Full path after the base URL required. So if you were to place it in "static" it would be "/favicon.ico"
# favicon = ""
# Add an emoji here to use it as favicon.
# Compatibility: https://caniuse.com/link-icon-svg
favicon_emoji = "👾"
# Path to the fallback image for social media cards (the preview image shown when sharing a link on WhatsApp, LinkedIn…).
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
# Learn how to create these images in batch and automatically:
# https://osc.garden/blog/automating-social-media-cards-zola/
# social_media_card = ""
menu = [
{ name = "about", url = "about", trailing_slash = true },
{ name = "blog", url = "blog", trailing_slash = true },
{ name = "archive", url = "archive", trailing_slash = true },
{ name = "tags", url = "tags", trailing_slash = true },
{ name = "categories", url = "categories", trailing_slash = true },
{ name = "projects", url = "projects", trailing_slash = true },
]
# The RSS icon will be shown if (1) it's enabled and (2) the following variable is set to true.
# Note for Zola 0.19.X users: when `feed_filenames` has two filenames, only the first one will be linked in the footer.
feed_icon = true
# Show the full post content in the Atom feed.
# If it's set to false, only the description or summary will be shown.
full_content_in_feed = true
# Email address for footer's social section.
# Protect against spambots:
# 1. Use base64 for email (convert at https://www.base64encode.org/ or `printf 'your@email.com' | base64`).
# 2. Or, set 'encode_plaintext_email' to true for auto-encoding (only protects on site, not in public repos).
email = "YWxkb0BmdW5lcy5tZQ=="
# Decoding requires ~400 bytes of JavaScript. If JS is disabled, the email won't be displayed.
encode_plaintext_email = true # Setting is ignored if email is already encoded.
# Social media links for the footer.
# Built-in icons: https://github.com/welpo/tabi/tree/main/static/social_icons
# To use a custom icon, add it to your site's `static/social_icons` directory.
socials = [
{ name = "github", url = "https://github.com/aldofunes/", icon = "github" },
{ name = "gitea", url = "https://gitea.funes.me/aldo", icon = "gitea" },
{ name = "linkedin", url = "https://www.linkedin.com/in/aldo-funes", icon = "linkedin" },
{ name = "mastodon", url = "https://techhub.social/@aldofunes", icon = "mastodon" },
]
# Fediverse profile.
# Adds metadata to feature the author's profile in Mastodon link previews.
# Example: for @username@example.com, use:
fediverse_creator = { handle = "aldofunes", domain = "techhub.social" }
# Extra menu to show on the footer, below socials section.
footer_menu = [
{ url = "https://cal.com/aldofunes", name = "calendar", trailing_slash = false },
{ url = "sitemap.xml", name = "sitemap", trailing_slash = false },
]
# Enable a copyright notice for the footer, shown between socials and the "Powered by" text.
# $TITLE will be replaced by the website's title.
# $CURRENT_YEAR will be replaced by the current year.
# $AUTHOR will be replaced by the `author` variable.
# $SEPARATOR will be replaced by the `separator` variable.
# Markdown is supported (links, emphasis, etc).
# copyright = "$TITLE © $CURRENT_YEAR $AUTHOR $SEPARATOR Unless otherwise noted, the content in this website is available under the [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/) license."
# For multi-language sites, you can set a different copyright for each language.
# The old way of setting `translated_copyright = true` and using i18n files is deprecated.
# If a translation is missing for language, the `copyright` value will be used.
# copyright_translations.es = "$TITLE © $CURRENT_YEAR $AUTHOR $SEPARATOR A menos que se indique lo contrario, el contenido de esta web está disponible bajo la licencia [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/)."
# Custom security headers. What urls should your website be able to connect to?
# You need to specify the CSP and the URLs associated with the directive.
# Useful if you want to load remote content safely (embed YouTube videos, which needs frame-src, for example).
# Default directive is self.
# Default config, allows for https remote images and embedding YouTube and Vimeo content.
# This configuration (along with the right webserver settings) gets an A+ in Mozilla's Observatory: https://observatory.mozilla.org
# Note: to use a Zola built-in syntax highlighting theme, allow unsafe-inline for style-src.
allowed_domains = [
{ directive = "font-src", domains = ["'self'", "data:"] },
{ directive = "img-src", domains = ["'self'", "https://*", "data:"] },
{ directive = "media-src", domains = ["'self'"] },
{ directive = "script-src", domains = ["'self'"] },
{ directive = "style-src", domains = ["'self'"] },
{ directive = "frame-src", domains = ["player.vimeo.com", "https://www.youtube-nocookie.com"] },
]
# Enable the CSP directives configured (or default).
# Can be set at page or section levels, following the hierarchy: page > section > config. See: https://welpo.github.io/tabi/blog/mastering-tabi-settings/#settings-hierarchy
enable_csp = true
# Custom subset of characters for the header.
# If set to true, the `static/custom_subset.css` file will be loaded first.
# This avoids a flashing text issue in Firefox.
# Please see https://welpo.github.io/tabi/blog/custom-font-subset/ to learn how to create this file.
custom_subset = true
[extra.analytics]
# Specify which analytics service you want to use.
# Supported options: ["goatcounter", "umami", "plausible"]
service = "plausible"
# Unique identifier for tracking.
# For GoatCounter, this is the code you choose during signup.
# For Umami, this is the website ID.
# For Plausible, this is the domain name (e.g. "example.com").
# Note: Leave this field empty if you're self-hosting GoatCounter.
id = "funes.me"
# Optional: Specify the URL for self-hosted analytics instances.
# For GoatCounter: Base URL like "https://stats.example.com"
# For Umami: Base URL like "https://umami.example.com"
# For Plausible: Base URL like "https://plausible.example.com"
# Leave this field empty if you're using the service's default hosting.
self_hosted_url = "https://plausible.funes.me"
# giscus support for comments. https://giscus.app
# Setup instructions: https://welpo.github.io/tabi/blog/comments/#setup
[extra.giscus]
# enabled_for_all_posts = false # Enables giscus on all posts. It can be enabled on individual posts by setting `giscus = true` in the [extra] section of a post's front matter.
# automatic_loading = true # If set to false, a "Load comments" button will be shown.
# repo = "welpo/tabi-comments"
# repo_id = "R_kgDOJ59Urw" # Find this value in https://giscus.app/
# category = "Announcements"
# category_id = "DIC_kwDOJ59Ur84CX0QG" # Find this value in https://giscus.app/
# mapping = "slug" # Available: pathname; url; title; slug. "slug" will use the post's filename (slug); this is the only way to share comments between languages.
# strict_title_matching = 1 # 1 to enable, 0 to disable. https://github.com/giscus/giscus/blob/main/ADVANCED-USAGE.md#data-strict
# enable_reactions = 1 # 1 to enable, 0 to disable.
# comment_box_above_comments = false
# light_theme = "noborder_light"
# dark_theme = "noborder_dark"
# lang = "" # Leave blank to match the page's language.
# lazy_loading = true
# utterances support for comments. https://utteranc.es
# Setup instructions: https://welpo.github.io/tabi/blog/comments/#setup
[extra.utterances]
# enabled_for_all_posts = false # Enables utterances on all posts. It can be enabled on individual posts by setting `utterances = true` in the [extra] section of a post's front matter.
# automatic_loading = true # If set to false, a "Load comments" button will be shown.
# repo = "yourGithubUsername/yourRepo" # https://utteranc.es/#heading-repository
# issue_term = "slug" # Available: pathname; url; title; slug. "slug" will use the post's filename (slug); this is the only way to share comments between languages. https://utteranc.es/#heading-mapping
# label = "💬" # https://utteranc.es/#heading-issue-label
# light_theme = "github-light" # https://utteranc.es/#heading-theme
# dark_theme = "photon-dark" # https://utteranc.es/#heading-theme
# lazy_loading = true
# Hyvor Talk support for comments. https://talk.hyvor.com
[extra.hyvortalk]
# enabled_for_all_posts = false # Enables hyvortalk on all posts. It can be enabled on individual posts by setting `hyvortalk = true` in the [extra] section of a post's front matter.
# automatic_loading = true # If set to false, a "Load comments" button will be shown.
# website_id = "1234"
# page_id_is_slug = true # If true, it will use the post's filename (slug) as id; this is the only way to share comments between languages. If false, it will use the entire url as id.
# lang = "" # Leave blank to match the page's language.
# page_author = "" # Email (or base64 encoded email) of the author.
# lazy_loading = true
# Isso support for comments. https://isso-comments.de/
# You need to self-host the backend first: https://blog.phusion.nl/2018/08/16/isso-simple-self-hosted-commenting-system/
# More info on some settings: https://isso-comments.de/docs/reference/client-config/
[extra.isso]
# enabled_for_all_posts = false # Enables Isso on all posts. It can be enabled on individual posts by setting `isso = true` in the [extra] section of a post's front matter.
# automatic_loading = true # If set to false, a "Load comments" button will be shown.
# endpoint_url = "" # Accepts relative paths like "/comments/" or "/isso/", as well as full urls like "https://example.com/comments/". Include the trailing slash.
# page_id_is_slug = true # If true, it will use the relative path for the default language as id; this is the only way to share comments between languages. If false, it will use the entire url as id.
# lang = "" # Leave blank to match the page's language.
# max_comments_top = "inf" # Number of top level comments to show by default. If some comments are not shown, an “X Hidden” link is shown.
# max_comments_nested = "5" # Number of nested comments to show by default. If some comments are not shown, an “X Hidden” link is shown.
# avatar = true
# voting = true
# page_author_hashes = "" # hash (or list of hashes) of the author.
# lazy_loading = true # Loads when the comments are in the viewport (using the Intersection Observer API).


@ -1,12 +1,15 @@
+++
-title = "Home"
+title = "Inicio"
# Note we're not setting `paginate_by` here.
[extra]
-header = { title = "Hola! I'm aldo", img = "img/main.webp", img_alt = "Óscar Fernández, the theme's author" }
+header = { title = "¡Hola! Soy Aldo", img = "img/profile.jpg", img_alt = "Aldo Funes, the blog's author" }
#section_path = "blog/_index.md" # Where to find your posts.
#max_posts = 5 # Show 5 posts on the home page.
+++
-# Welcome to your new blog!
+# ¡Bienvenido a mi blog!
+Aquí escribo sobre cualquier cosa que me parezca interesante. Este blog es una forma de compartir mis pensamientos y experiencias con el
+mundo. Espero que disfrutes leyéndolo tanto como yo disfruto escribiéndolo.


@ -3,10 +3,13 @@ title = "Home"
# Note we're not setting `paginate_by` here.
[extra]
-header = { title = "Hello! I'm aldo", img = "img/main.webp", img_alt = "Óscar Fernández, the theme's author" }
+header = { title = "Welcome to my blog!", img = "img/profile.jpg", img_alt = "Aldo Funes, the blog's author" }
-#section_path = "blog/_index.md" # Where to find your posts.
+section_path = "blog/_index.md" # Where to find your posts.
-#max_posts = 5 # Show 5 posts on the home page.
+max_posts = 5 # Show 5 posts on the home page.
+++
-# Welcome to your new blog!
+# Welcome to my blog!
+Here, I write about pretty much anything that I find interesting. This blog is a way for me to share my thoughts and experiences with the
+world. I hope you enjoy reading it as much as I enjoy writing it.


@ -1,4 +1,4 @@
+++
title = "Archive"
-#template = "archive.html"
+template = "archive.html"
+++


@ -0,0 +1,25 @@
+++
title = "Why I quit social networks"
description = "I deleted all my social network accounts 4 months ago. It feels oddly liberating."
date = 2018-04-01
updated = 2025-03-12
[taxonomies]
tags = ["Social Networks", "Life Decisions"]
[extra]
social_media_card = "img/social_cards/blog_why_i_quit_social_networks.jpg"
+++
There was a smart guy called _Dunbar_. He studied some animals and came to the conclusion that a person's social circle is limited by the
brain's capacity to store information. In other words, we can only store so many individuals in our brain; the rest are just people. It may
even sound mean, but to us, people are outside our caring boundaries. This fellow studied several people and decided that our limit is 150.

What? We can only remember 150 people? But hey, I have like 1,000+ friends on Facebook alone! That cannot be true!

It is a little bit more complicated than that. Knowing someone is different from caring about them. Let's do a quick exercise: grab a pen and
paper, and (without looking at your Facebook) make a list of everyone you know. Even if you have a super-memory, I can still guarantee that
you will struggle to write 50 names, let alone 150. In practice, the number is much lower.

So, what does this have to do with quitting social networks? Well, I realized that I was spending too much time on them. I am not a social
person. Actually, you may define me as an introvert. So, being introverted


@ -0,0 +1,207 @@
+++
title = "Chronicles of an Expedition to Peru"
description = ""
date = 2018-07-05
[taxonomies]
tags = ["alpinism", "expeditions"]
+++
Our flight left Mexico on Wednesday. We had a layover of about two hours at the airport in El Salvador before catching the flight that
took us to Lima. Leaving the airport at around 2:00 AM, we took an Uber to a hostel near the bus terminal, where we managed to sleep a
little over three hours.

<!---
Photos
Andrea Funes
Manuel Valencia
Me
--->

After a good shower, the Uber picked us up and headed for the Cruz del Sur terminal, 4 km from the hostel. That is where the problems
began. Rush hour in Lima turned out to be just as bad as Mexico City's. Getting out of the Uber, we ran to the terminal, since the bus
left at 9:30, it was already 9:25, and we had not checked in or paid for our luggage. In the end, the bus was running late and we had to
wait for it, by then more relaxed.

The trip was very comfortable, as if we had travelled first class on a plane: food and drinks on board, and incredibly comfortable seats.
Over the eight-hour ride I had time to read, sleep, eat, listen to music, watch movies, sleep a little more, and watch the scenery of the
Peruvian coast.

They picked us up at eight in the morning, and three hours later we arrived in Cashapampa. We started the expedition with:

- Andrea
- Manuel
- Aldo
- Nehemio (El Demonio): guide
- Hernán (Huaypa): second guide
- Michel (Chara): cook
- Ezequiel (Binglis): porter
- Álvaro: muleteer
- Juanito: driver

The hike to Llamacorral took us longer than it took the donkeys carrying all the gear we would use at the base camps. Normally, on an
expedition we organize ourselves, by this point we would be eating a granola bar, some nuts, and dried fruit; eating real food is usually
a luxury. Chara, however, handed us a piece of roast chicken with a bit of green rice halfway up.

At the Llamacorral camp we had tea, ate some bread, and chatted for a while. Every so often you could hear Chara shout _Aldo, a joke!_,
and I had to come up with a good one if I wanted to earn the right to dinner.

This is a good moment to lay out our plan. The itinerary agreed with Nehemio ended up like this:

1. Huaraz - Llamacorral
1. Llamacorral - Alpamayo base camp
1. Base camp - High camp
1. High camp - Alpamayo summit - High camp
1. High camp - Quitaraju summit - High camp
1. High camp - Base camp
1. Base camp - Artesonraju base camp
1. Artesonraju base camp - Moraine camp
1. Moraine camp - Artesonraju summit - Base camp
1. Base camp - Huaraz

While at the Alpamayo base camp, we learned that Alpamayo had huge cornices above the route's couloir that could collapse at any moment
and turn into a lethal avalanche. Later, we heard from another guide who had attempted Quitaraju that he had to turn around on the sixth
pitch (of 10) because the snow was so loose it would not take any protection.

There was an accident at the high camp. A German climber, trying to retrieve a water bottle, fell into a crevasse in the high camp's
glacier. Honestly, it was a stupid move, but he hit his head and broke his femur. According to the guides, the Huaraz mountain police
always steal the credit for rescues like this one. The first to arrive is the rescue team from the guides' house; the police show up,
take a photo, and report that _after X hours, they rescued X people_.

We have already ruled out Alpamayo, because the risk is too high. We will wait until we are back down for news about Quitaraju, to see
whether climbing to the high camp is worth it. In the meantime, the 6-hour hike to Artesonraju's moraine camp will keep us busy. We made
a quick stop at the Artesonraju base camp to eat a little and pack the backpacks we will climb with. From now on there will be no donkeys
to help us, so every gram counts.

The climb is very steep and it is very hot. We move slowly up the mountain's lower slopes. On top of that, Manu and I are each carrying
our camera, two lenses, and a tripod. We console ourselves thinking of the star photos we are going to take. Looking up at the night sky
and seeing the Milky Way is one of the most impressive experiences of being on a mountain. The altitude and the absence of light
pollution make the sky look like something out of a movie. The sheer number of visible stars makes me think of the ancient civilizations
that based their lives on them.

_Manu, a joke!_ Now it was Manuel's turn to tell Chara a joke. He made us quite a good dinner for being at 5,000 metres above sea level.
When we travel on our own, to save weight, we carry bags of dehydrated food, something like astronaut food. There are all kinds: lasagna
bolognese, rice with beef, Thai chicken, even desserts like cheesecake. This time we had a light vegetable soup and some pasta with
tomato sauce for dinner. It is very strange to see a pressure cooker at a camp like this, but without it, for some reason, Chara would
not feel comfortable boiling potatoes.

We woke up at eleven at night. We made the final preparations. We had a quick tea to warm up. We geared up and set out eager to climb one
of the most technical mountains in Peru. Two hours of moraine, three of glacier, and four of ice walls; that is more or less what we are
about to do. The moraine went by quickly, and the glacier was not complicated at all. Our suffering began when we caught up with the
Argentinians we had met the day before.

One of them was a guide on Aconcagua, where it can get down to forty below zero. Arteson, at ten below, posed no challenge to them as far
as temperature goes. They set out a little before us so that our slower pace would not be a problem for anyone.

We had to wait for them for an hour just before starting the first pitch on the ice wall. Apparently they were not that fast, or we were
not that slow. The point is that waiting an hour at four in the morning in the middle of a glacier at five thousand five hundred metres
chilled us thoroughly, despite our goose-down jackets. When they finally cleared the route and we could start climbing, we decided it was
cold enough to keep the down jackets on. I fought with my harness because it ended up underneath mine.

The snow is in terrible condition. The first pitch has hard ice covered by a layer of porous ice. The ice axes sink in, but the moment
you pull, big blocks of porous ice break off, as if the wall were coated in brittle sponge. Underneath that, the hard ice bounces the axe
back. I am only managing to sink it a few millimetres, and it gives me no confidence at all. The wall is inclined at 80° and, even though
Hernán is belaying me from above, I feel like the ice will give way at any moment.

This is how our ascent works: Nehemio climbs first, and Hernán shortly after; Andrea and Manu climb belayed by Nehemio, four metres
apart; I go last, belayed by Hernán. That way we manage to climb more or less in parallel and not lose so much time sending one rope team
up after the other.

The Argentinian group is on the last pitch, about to reach the summit ridge; we can see them from where we are. There is a strong wind,
and we watch the clouds rise and swallow our Argentinian friends. It did not take long for them to swallow us too.

The wind hits us from the side and, since it is damp, it coats the whole left side of our bodies in frost. We cannot see much either;
visibility must be around 30 metres. I can no longer hear Hernán shouting that I can start climbing, but I can see Nehemio signalling
something like _you can come up now_.

At the belay stations, while we wait for the guides to climb and set up the next one, we freeze. Cold, numb hands are of little use when
I try to clip a carabiner to my harness, which is still underneath my down jacket. I cannot feel my feet and I am shivering violently.

---

_Are you going climbing?_ -- Yes -- _Wow, how cool!_ The hostel lobby felt like a market, with all the girls on a school trip running
around and shouting.

We are already in the van on the way to the Ishinka Valley. Chara forgot the joke book he said he would bring. What he did bring was a
ball to play football at base camp. I get tired just imagining it: a football match at four thousand metres, who comes up with that?

This valley is much prettier than the Santa Cruz one. The river is turquoise blue, and there are red-barked trees forming a kind of
tunnel over the trail. There is plenty of vegetation. Above all, there is shade and a breeze that cools us on the way up. The mid-hike
meal was a piece of stewed chicken with some sweet potato; exquisite.

Base camp is packed. Manu says it is like Disneyland. We counted some 60 tents, plus whoever is staying in the refuge. The view of Toclla
is impressive; it has an imposing glacier. The idea is to go up the next day and summit on the 15th.

We received the weather forecast. Tomorrow will be calm, and on Sunday there will be wind, snow, rain, and cold. So, change of plans!
Tomorrow we make the _summit push_. We will skip the high camp and go straight for the top.

Hernán says we have to wake up at ten at night. The route is long, and if we want to reach the summit at a decent hour we have to leave
at eleven. It is seven and we are barely finishing dinner; with luck we will sleep two hours. The good news is that everything is ready
for tomorrow: the backpacks, the boots, and the clothes.

-- Ring ring, Manuel! -- shouts Nehemio to wake us up. He gave us forty more minutes of sleep, but now we have to get ready, eat
something quick, and leave. We are actually still full from dinner, and a tea and a biscuit with jam is enough. We start the climb at
23:45, and Ezequiel is helping us carry a few things up to the glacier, my boots included.

The path across the moraine is horrible. There are patches of snow and ice, but we cannot put on our crampons yet, so it is very
slippery. Three and a half hours later, with frozen feet, we reach the tongue of the glacier. The rope teams are the same as on Arteson:
Andrea, Manuel, and Nehemio on one, and Hernán and me on the other.

The wind started at around three in the morning. An icy, dry wind. The good thing is that it does not feel as cold as the wind on
Artesonraju, and as long as we keep moving, it is not so bad.

I think we are keeping a good pace. Far in the distance we can see four headlamps, but we are getting closer and closer to them. We are
now walking in zigzags to dodge all the crevasses in the glacier, so by the time we catch up with the people ahead of us, it is already
dawn.

The first ones we reach are a Frenchman and his guide. Nehemio told us the guide is the former technical director of the guides' school,
the one in charge of observing the candidates' performance during the three years the course lasts. They have already decided to get off
the mountain because the snow is in bad condition. The other rope team is a Mexican couple. They are trying to climb over a broken serac
that blocks the route. We wait for them only a short while; however, it seems the route actually goes another way. The glacier in that
area has a crevasse that had been blocking climbers on past expeditions. We had been told that the way past it was to go around to the
left.

We leave the other Mexicans to fight with the serac and move to a snow ramp farther left to avoid the serac's crevasse. I am belaying
Hernán, who climbs up the ramp saying the snow is very loose and in bad condition. I watch how every move sends snow pouring down. At the
same time, Manu belays Nehemio, who climbs a few metres below Hernán. It is the first time I have seen them protect the route. Hernán
placed an ice screw, which Nehemio used as well. For them to do that, the snow must be truly bad.

I climb past Andrea and Manu on the right; they are still anchored at the belay. Now I understand the guides' suffering. This snow is so
loose that even with ice axes and crampons, I do not feel confident


@ -0,0 +1,257 @@
+++
title = "Infrastructure as Code"
description = "Notes from the book"
date = 2021-02-07
updated = 2025-03-12
[taxonomies]
tags = ["Infrastructure", "Automation", "Quality"]
+++
# Introduction
This is a summary of the book **Infrastructure as Code (2nd edition)** by Kief Morris, where I state the things that stood out the most to
me.
# What is Infrastructure as Code?
Organizations are becoming increasingly "digital" [^1], and as they do, the IT infrastructure becomes more and more complex: more services,
more users, more business activities, suppliers, products, customers, stakeholders... and the list goes on and on.
[^1]: short for 'software systems are essential for our business'
Infrastructure automation tools help manage this complexity by keeping the entire infrastructure as code. This helps by optimizing for
change. People say: we don't make changes often enough to justify automating them; we should build first, automate later; we must choose
between speed and quality. These all lead to a "Fragile Mess": changing the infrastructure becomes a cumbersome, error-prone process.

When prioritizing quality, many organizations put in place complex processes to change even the tiniest detail of their infrastructure,
processes which are eventually abandoned when a deadline approaches. Other companies prioritize speed (move fast and break things);
infrastructure that "just works, but no one knows how" is a very dangerous thing to have. Making changes to it becomes more an obscure art
than a clear process. The only sustainable way of maintaining infrastructure is to prioritize quality and speed equally; this may seem like
an unattainable ideal, but it is where we find high performers.
[DORA](https://www.devops-research.com/research.html) identified four key metrics in its _Accelerate_ research:
1. **Delivery lead time**: The elapsed time it takes to implement, test, and deliver changes to the production system
2. **Deployment frequency**: How often you deploy changes to production systems
3. **Change fail percentage**: What percentage of changes either cause an impaired service or need immediate correction, such as a rollback
or emergency fix
4. **Mean Time to Restore (MTTR)**: How long it takes to restore service when there is an unplanned outage or impairment

Organizations that perform well against these four metrics also tend to perform well overall; they are the high performers mentioned above.
# Core Practices
- **Define everything as code**: Enables reusability, consistency and transparency
- **Continuously test and deliver all work in progress**: Build quality in instead of trying to test quality in
- **Build small, simple pieces that you can change independently**: The larger a system is, the harder it is to change, and the easier it is
to break.
# Principles
- **Assume systems are unreliable**: Cloud-scale infrastructure has so many moving parts that, even with reliable hardware, systems fail
- **Make everything reproducible**: It removes the fear and risk of making changes
- **Create disposable things**: It should be possible to add, remove, start, stop, change and move parts of the system. The cloud abstracts resources (storage, compute, networking) from physical hardware. The system should be able to dispose of faulty parts and heal itself.
- **Minimize variation**: Keep the number of different _types_ of pieces to a minimum; it's easier to manage one hundred identical servers than five completely different servers.
- **Ensure you can repeat any process**: This will help make things reproducible
Of course, to use code to define and manage infrastructure, a dynamic infrastructure platform is required. The platform should expose its functionality to provision resources via APIs or something of the sort; think Amazon Web Services, Microsoft Azure, Google Cloud Platform, DigitalOcean, or even VMware.
# Infrastructure code
If I want to create a server, it is easier and faster to go to the platform (AWS) and click through the GUI instead of writing a script for it. On the other hand, provisioning it through code makes the process reusable, consistent and transparent, so defining the server as code is the better approach. Defining everything as code enables you to leverage speed to improve quality, much like Agile uses speed to improve software quality by having tight feedback loops and iterating on that feedback.
Infrastructure code can use declarative or imperative languages[^2].
[^2]: Imperative code is a set of instructions that specifies how to make a thing happen. Declarative code specifies what you want, without specifying how to make it happen.
# Infrastructure stacks
An infrastructure stack is a group of resources that are defined, provisioned and updated as a unit. For example: A stack may include a
virtual machine, a disk volume and a subnet.
Examples of stack management tools include:
- [HashiCorp Terraform](https://www.terraform.io/)
- [AWS CloudFormation](https://aws.amazon.com/cloudformation/)
- [Azure Resource Manager](https://docs.microsoft.com/en-us/azure/azure-resource-manager/management/overview)
- [Google Cloud Deployment Manager](https://cloud.google.com/deployment-manager/docs/)
- [OpenStack Heat](https://wiki.openstack.org/wiki/Heat)
- [Pulumi](https://www.pulumi.com/)
- [Bosh](https://bosh.io/docs/)
## Patterns and Antipatterns for Structuring Stacks
One challenge with infrastructure design is deciding how to size and structure stacks.
### Antipatterns
- **Monolithic stack**: Puts an entire system into one stack. Changing a large stack is riskier than changing a smaller one: more things can go wrong, and larger stacks take longer to provision and change. Because of the risk and slowness, people make fewer changes to it, and less frequently, which increases technical debt.
### Patterns
- **Application group stack**: Groups multiple related pieces of a system into one stack. Application group stacks commonly grow into monolithic stacks, but the pattern can work well when a single team owns the infrastructure for all the pieces of an application.
- **Service stack**: Puts the infrastructure for a single application into its own stack. Service stacks align the boundaries of the infrastructure to the software that runs on it. There may be unnecessary duplication of code, which can encourage inconsistency; reusing shareable modules is encouraged.
- **Micro stack**: Breaks the infrastructure for a given application or service into multiple stacks. Different parts of a service's infrastructure may change at different rates. Although each part is smaller, having many of them increases complexity.
# Building Environments with Stacks
Environments and stacks are both collections of resources. What makes environments different is that they are organized around a particular purpose (supporting the testing phase, providing service in a geographical region). An environment is composed of one or more stacks. Although possible, a single stack should not provision multiple environments.
Consistency across environments is one of the main drivers of Infrastructure as Code. If the testing environment is not the same as the one in production, you may find that some things behave differently; you may even push broken code that "works" in the testing environment. How many times have we heard "it runs okay in my local environment"?
## Antipattern: Multi-Environment Stack
This antipattern defines and manages multiple environments in a single stack instance. Every time you change a testing environment, you risk breaking production if there is a mistake, a bug in the tools, or a problem somewhere in the pipeline.
## Antipattern: Copy-Paste Environment
It uses separate source-code projects to manage each environment. Because environments should be identical, people resort to copy-pasting
the code from the modified stack into the other ones. As you can imagine, this can get messy pretty fast. Maybe some tweaks to reduce costs
are incompatible with the production environment. This causes configuration drift, and makes it confusing for people looking at the code for
the first time.
## Pattern: Reusable Stack
Here, you maintain a single source-code project, with enough configuration parameters to provision all the required environments. However,
it could be too rigid for situations where environments must be heavily customized.
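As a rough sketch of the idea (reusing the generic `stack` command from the examples below; the parameter names are illustrative), the same project can provision any number of environments just by varying its parameters:
```bash
# Hypothetical: one reusable stack project provisioning several environment instances.
# 'stack' is the generic stack tool used in the examples below; parameter names are illustrative.
stack up environment=test       cluster_maximum=1 --source my-stack/src
stack up environment=staging    cluster_maximum=3 --source my-stack/src
stack up environment=production cluster_maximum=5 --source my-stack/src
```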
The reusable stack should be the foundation for any new environment. There are several ways to supply the configuration parameters that make each instance unique; let's look at them.
## Antipattern: Manual Stack Parameters
The most natural approach to provide values for a stack instance is to type the values on the command line manually.
```bash
stack up environment=production --source my-stck/src
# FAILURE: No such directory 'my-stck/src'
stack up environment=production --source my-stack/src
# SUCCESS: new stack 'production' created
stack destroy environment=production --source my-stack/src
# SUCCESS: stack 'production' destroyed
stack up environment=production --source my-stack/src
# SUCCESS: existing stack 'production' modified
```
## Pattern: Stack Environment Variables
The stack environment variables pattern involves setting parameter values as environment variables for the stack tool to use. This pattern
is often combined with another pattern to set the environment variables.
```bash
export STACK_ENVIRONMENT=test
export STACK_CLUSTER_MINIMUM=1
export STACK_CLUSTER_MAXIMUM=1
export STACK_SSL_CERT_PASSPHRASE="correct horse battery staple"
```
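A minimal sketch of how these variables might then be consumed, assuming a thin wrapper maps the `STACK_*` variables onto stack parameters (the real mechanism depends on the tool in use):
```bash
# Assumption: a small wrapper passes the STACK_* variables set above to the stack tool.
stack up \
  environment="${STACK_ENVIRONMENT}" \
  cluster_minimum="${STACK_CLUSTER_MINIMUM}" \
  cluster_maximum="${STACK_CLUSTER_MAXIMUM}" \
  ssl_cert_passphrase="${STACK_SSL_CERT_PASSPHRASE}" \
  --source my-stack/src
```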
## Pattern: Scripted Parameters
The scripted parameters pattern involves hardcoding the parameter values into a script that runs the stack tool. You can write a separate script for each environment, or a single script that includes the values for all of your environments:
```ruby
# The target environment is read from an environment variable (name assumed for this example).
case ENV["ENVIRONMENT"]
when "test"
  system("stack up cluster_maximum=1 env=test")
when "staging"
  system("stack up cluster_maximum=3 env=staging")
when "production"
  system("stack up cluster_maximum=5 env=production")
end
```
## Pattern: Stack Configuration Files
We can manage the parameters for each stack instance in separate files.
For example:
```text
├── src/
│ ├── cluster.tf
│ ├── host_servers.tf
│ └── networking.tf
├── environments/
│ ├── test.tfvars
│ ├── staging.tfvars
│ └── production.tfvars
└── test/
```
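Since the example tree uses `.tf`/`.tfvars` files, a Terraform invocation is one plausible way to point the tool at a per-environment file (adapt to whatever stack tool you use):
```bash
# Provision the test environment from its parameter file.
cd src/
terraform init
terraform apply -var-file=../environments/test.tfvars
```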
This pattern is simple and very useful when the environments don't change often, as it only requires creating and committing a new configuration file per environment. It also makes changes slower to reach the stable production environments, since these files have to progress through the various stages before being committed to the main branch.
## Pattern: Wrapper Stack
Let's say we write our infrastructure code as reusable modules, kind of like a library. A wrapper stack is a code project that imports and uses those modules, passing the appropriate parameters to them. In that sense, every environment is its own code project, and we can set up an independent Git repository for each. The one thing we must keep out of these repos is secrets; for those we can leverage other patterns, like environment variables or untracked configuration files.
There's a catch, though: there is added complexity in having to manage both the reusable modules and the projects that use them. Furthermore, separate projects tempt people into adding environment-specific logic to one of them without contributing it upstream.
## Pattern: Pipeline Stack Parameters
Delivery pipelines allow us to set, at the very least, environment variables via an admin panel, a CLI or even an API. We can hook our project up to such a pipeline and let the pipeline supply the configuration. An added bonus is that the entire team can use it without having to share parameters or even secrets. The catch is that we become dependent on the pipeline: if for some reason it is offline or simply not accessible, we cannot make changes to our infrastructure.
Also, CI/CD servers are among the first things attackers look for when they gain access to a network, because they are full of admin-level credentials.
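As a purely illustrative sketch (assuming GitHub Actions as the delivery pipeline and the `gh` CLI; Gitea Actions offers an equivalent settings page), sensitive stack parameters can live on the pipeline side instead of being shared between people:
```bash
# Illustrative: store stack parameters as pipeline secrets (GitHub Actions via the gh CLI).
# The secret names are made up for this example.
gh secret set STACK_SSL_CERT_PASSPHRASE --body "correct horse battery staple"
gh secret set STACK_CLUSTER_MAXIMUM --body "5"
```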
## Pattern: Stack Parameter Registry
We can store all our parameters in a centralized registry; this can range from simple files on a file server to a Consul cluster or a SQL database. Of course, we have to use another pattern to set the connection parameters for the registry itself; but once plugged in, changing a parameter becomes a matter of executing a query or modifying a file.
The main issue with this pattern is that we are adding one more thing to manage: the registry itself, an extra moving piece and a potential point of failure.
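For instance, with a Consul cluster as the registry (key paths are illustrative), reading or changing a parameter is just a key/value operation:
```bash
# Illustrative: Consul key/value store as the parameter registry.
consul kv put myapp/production/cluster_maximum 5
consul kv get myapp/production/cluster_maximum
```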
# Testing
If good testing yields great results for application development, we can assume it will be very useful while defining our infrastructure as
code.
Testing infrastructure code is not easy. Many frameworks use declarative languages, which makes unit tests for these declarations somewhat
useless. If we declare that we want a server with 1 CPU and 2 GB of RAM, the test that we write might check for these attributes. We are
simply re-stating what we declared in the code. With such a test, we are testing that the provider and the tool work as promised, not that our code does what we intend. Unit testing is reserved, then, for those sneaky pieces of code that are influenced by configuration parameters and, why not, for testing around known issues with the tools or platforms we use.
The heavy burden will lie on integration tests. If we want three subnets (`subnet-a` is public, `subnet-b` is private, `subnet-c` is private and has no access to the other subnets), we can test for exactly that: that `subnet-a` is reachable from the internet, that `subnet-b` can reach `subnet-a`, and that `subnet-c` cannot reach the other subnets. This is much more useful, since we are testing several moving pieces. Provisioning a network involves access rules, routing tables, subnet masks, internet gateways, NAT gateways, and other pieces that, provisioned together, provide a useful network; we might as well test that it works the way we want it to.
Integration tests are slower than unit tests, since we must provision actual resources on real platforms. The slowness gets worse with end-to-end tests, that is, when we provision our entire infrastructure in order to test it.
The key is to identify risks; in our case, the risk is that we may accidentally expose `subnet-c` to the outside world, so we test to ensure that risk does not become a reality.
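A naive sketch of what such an integration test could look like, assuming a disposable test host in each subnet and placeholder addresses; purpose-built tools (Terratest, InSpec, and the like) structure this far better:
```bash
#!/usr/bin/env bash
set -euo pipefail
# Illustrative network checks for the three-subnet example; hosts, IPs and ports are placeholders.

# subnet-a must be reachable from the internet.
curl --silent --fail --max-time 5 http://203.0.113.10/health

# subnet-b must be able to reach subnet-a.
ssh test-host-b 'nc -z -w 5 10.0.1.10 80'

# subnet-c must NOT be able to reach the other subnets.
if ssh test-host-c 'nc -z -w 5 10.0.1.10 80'; then
  echo "FAIL: subnet-c can reach subnet-a" >&2
  exit 1
fi
echo "Network integration checks passed."
```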

View File

@ -0,0 +1,208 @@
+++
title = "Gitea - Open Source GitHub Alternative"
description = "Self-hosting Gitea, a lightweight GitHub alternative written in Go."
date = 2025-02-17
updated = 2025-03-12
[taxonomies]
tags = ["Self-Hosting", "CI/CD", "Linux", "Gitea", "Go"]
+++
## Introduction
Gitea is a lightweight, self-hosted alternative to GitHub, providing Git repository hosting, CI/CD, package management, and team
collaboration features. If you value privacy, control, and flexibility over your development workflow, self-hosting Gitea can be a great
choice.
This guide will walk you through setting up Gitea using Docker and Caddy as a reverse proxy, along with configuring a runner for CI/CD
pipelines.
## Prerequisites
Before we start, ensure you have the following:
1. **A Linux server** - I am using [Pop!_OS 24.04 LTS alpha](https://system76.com/cosmic/).
2. **A domain name** pointing to your server - I am using [Cloudflare](https://www.cloudflare.com/).
3. **A container runtime** - I am using [Docker](https://www.docker.com/).
4. **A reverse proxy** - I am using [Caddy](https://caddyserver.com/).
## Installation
### Step 1: Create a Storage Location
To store repository files and application data, I will create a dataset in my ZFS pool at `/data` (ZFS is optional; use any directory if ZFS
is unavailable):
```bash
sudo mkdir -p /data/gitea
```
### Step 2: Create a Gitea User
We need a dedicated user for running Gitea:
```bash
adduser \
--system \
--shell /bin/bash \
--gecos 'Gitea' \
--group \
--disabled-password \
--home /home/gitea \
gitea
```
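The compose file below expects `USER_UID` and `USER_GID` to match this user, so look them up first (your values will likely differ from the `122`/`126` shown in my compose file):
```bash
id gitea
# Example output: uid=122(gitea) gid=126(gitea) groups=126(gitea)
```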
### Step 3: Set Up Docker Compose
Create a `compose.yaml` file for Gitea:
```yaml
# ~/gitea/compose.yaml
services:
server:
image: docker.gitea.com/gitea:latest
environment:
USER: gitea
USER_UID: 122 # Ensure this matches the Gitea user ID
USER_GID: 126 # Ensure this matches the Gitea group ID
GITEA__database__DB_TYPE: postgres
GITEA__database__HOST: db:5432
GITEA__database__NAME: gitea
GITEA__database__USER: gitea
GITEA__database__PASSWD: __REDACTED__
GITEA__server__DOMAIN: gitea.example.com
GITEA__server__HTTP_PORT: 9473
GITEA__server__ROOT_URL: https://gitea.example.com/
GITEA__server__DISABLE_SSH: false
GITEA__server__SSH_PORT: 22022
restart: always
volumes:
- /data/gitea:/data
- /etc/timezone:/etc/timezone:ro
- /etc/localtime:/etc/localtime:ro
ports:
- "9473:9473"
- "22022:22"
depends_on:
- db
db:
image: docker.io/library/postgres:17
restart: always
environment:
POSTGRES_USER: gitea
POSTGRES_PASSWORD: __REDACTED__
POSTGRES_DB: gitea
volumes:
- postgres-data:/var/lib/postgresql/data
volumes:
postgres-data:
driver: local
```
### Step 4: Start Gitea
Run the following command to start Gitea:
```bash
docker compose up --detach
```
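Optionally, confirm both containers are up and follow the logs while Gitea initializes:
```bash
docker compose ps
docker compose logs -f server
```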
> **Note:** Gitea's SSH server is set to port `22022` because port `22` is already in use by my existing SSH setup. Adjust this as needed.
## Setting Up the Reverse Proxy
We will use Caddy to handle HTTPS and reverse proxy requests for Gitea. Add the following to your `Caddyfile`:
```plaintext
gitea.example.com {
tls {
dns cloudflare __CLOUDFLARE_TOKEN__
resolvers 1.1.1.1
}
reverse_proxy localhost:9473
}
```
Reload Caddy to apply the changes:
```bash
sudo systemctl reload caddy
```
## Configuration
Now, open your browser and navigate to `https://gitea.example.com` (or `http://localhost:9473` if testing locally). Complete the setup by
filling in:
- Admin account details
- Database configuration
- SMTP settings (if needed)
Once configured, click **Install Gitea**.
## Setting Up CI/CD
Gitea has built-in CI/CD capabilities. We will deploy an [Act Runner](https://docs.gitea.com/usage/actions/act-runner) to run pipelines.
### Step 1: Create Runner Configuration
The following `runner-config.yaml` addresses an issue where the runner cannot use its cache because job containers run on a different network:
```yaml
# ~/gitea/runner-config.yaml
cache:
enabled: true
dir: "/data/cache"
host: "192.168.1.10" # Replace with your server's private IP
  port: 9012 # Port the cache server listens on; must be reachable from job containers
```
### Step 2: Update Docker Compose
Modify `compose.yaml` to add the runner service:
```yaml
# ~/gitea/compose.yaml
services:
runner:
image: docker.io/gitea/act_runner:latest
environment:
CONFIG_FILE: /config.yaml
GITEA_INSTANCE_URL: https://gitea.example.com/
GITEA_RUNNER_REGISTRATION_TOKEN: __REDACTED__
GITEA_RUNNER_NAME: runner-1
ports:
- "9012:9012"
volumes:
- ./runner-config.yaml:/config.yaml
- gitea-runner-data:/data
- /var/run/docker.sock:/var/run/docker.sock
volumes:
gitea-runner-data:
driver: local
```
### Step 3: Start the Runner
Run the following command:
```bash
docker compose up --detach
```
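If registration succeeds, the runner logs should show it connecting to the Gitea instance, and it should then appear in the Actions runners list of the admin panel:
```bash
docker compose logs -f runner
```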
## Conclusion
Self-hosting Gitea is relatively straightforward and provides full control over your development workflow. However, setting up CI/CD and
reverse proxying may require some tweaking to fit your setup.
### Next Steps
- Configure **OAuth authentication** (e.g., GitHub, GitLab, LDAP).
- Set up **automated backups** to avoid data loss.
- Enable **Gitea Webhooks** for integration with external services.
If you have any questions, feel free to reach out. Happy coding!

View File

@ -0,0 +1,189 @@
+++
title = "Self-Hosting a Website in 2025"
description = "A step-by-step guide to hosting your own website using Zola, Caddy, and Gitea."
date = 2025-03-12
updated = 2025-03-12
[taxonomies]
tags = ["Self-Hosting", "CI/CD", "Linux", "Caddy", "Zola"]
[extra]
social_media_card = "img/social_cards/blog_self_hosting_a_blog_in_2025.jpg"
+++
## Introduction
[Zola](https://www.getzola.org/) is a static site generator written in Rust that is fast, simple, and easy to use. This blog is built using
Zola and hosted on a Linux server with [Caddy](https://caddyserver.com/) as the web server.
This guide will walk you through setting up Zola and Caddy to self-host your website efficiently.
## Prerequisites
Before starting, ensure you have the following:
1. **A Linux server** - I am using [Pop!_OS 24.04 LTS alpha](https://system76.com/cosmic/).
2. **A domain name** pointing to your server - I use [Cloudflare](https://www.cloudflare.com/).
3. **A reverse proxy** - [Caddy](https://caddyserver.com/) handles this role.
4. **A CI/CD platform** - I use [Gitea](/blog/gitea-open-source-github-alternative) for automated deployments.
5. **A privacy-focused analytics tool** - I use [Plausible](https://plausible.io/).
## Installation
### Step 1: Install Zola
Since there is no precompiled package for Pop!_OS 24.04 LTS alpha, we will install Zola from source:
```bash
git clone https://github.com/getzola/zola.git
cd zola
cargo install --path . --locked
zola --version
```
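This assumes a working Rust toolchain (`cargo` on your `PATH`); if it is missing, rustup is the usual way to get one:
```bash
# Install the Rust toolchain with the official rustup installer, then re-run the build above.
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
```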
### Step 2: Create a New Site
Initialize a new Zola site:
```bash
zola init blog
cd blog
git init
echo "public" > .gitignore
```
### Step 3: Install a Zola Theme
I use the [tabi](https://github.com/welpo/tabi.git) theme. To install it:
```bash
git submodule add https://github.com/welpo/tabi.git themes/tabi
```
### Step 4: Configure Zola & Tabi
Zola uses a `config.toml` file for configuration. Below is a sample configuration:
```toml
base_url = "https://www.aldofunes.com"
title = "Aldo Funes"
description = "Human being in the making"
default_language = "en"
theme = "tabi"
compile_sass = false
minify_html = true
author = "Aldo Funes"
taxonomies = [{ name = "tags" }, { name = "categories" }]
build_search_index = true
[markdown]
highlight_code = true
highlight_theme = "css"
highlight_themes_css = [{ theme = "dracula", filename = "css/syntax.css" }]
render_emoji = true
external_links_class = "external"
external_links_target_blank = true
smart_punctuation = true
[search]
index_format = "elasticlunr_json"
[extra]
stylesheets = ["css/syntax.css"]
remote_repository_url = "https://gitea.funes.me/aldo/blog"
remote_repository_git_platform = "gitea"
mermaid = true
show_previous_next_article_links = true
toc = true
favicon_emoji = "👾"
```
### Step 5: Add Content
Zola uses Markdown for content creation, and its directory structure is intuitive. Use your favorite text editor to start writing articles.
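For example, a first post might look like this (the section path and front matter fields are just one possible layout):
```bash
mkdir -p content/blog
cat > content/blog/hello-world.md << 'EOF'
+++
title = "Hello, world"
date = 2025-03-12
[taxonomies]
tags = ["meta"]
+++

My first post, built with Zola and the tabi theme.
EOF
```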
### Step 6: Deploy Your Site
To serve the site with Caddy, place the generated files in `/www/blog` and configure Caddy with the following `Caddyfile`:
```Caddyfile
aldofunes.com, www.aldofunes.com {
tls {
dns cloudflare __CLOUDFLARE_TOKEN__
resolvers 1.1.1.1
}
root * /www/blog
file_server
handle_errors {
rewrite * /{err.status_code}.html
file_server
}
header Cache-Control max-age=3600
header /static/* Cache-Control max-age=31536000
}
```
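Before CI/CD is in place, one way to publish manually (assuming your user can write to `/www/blog`, e.g. via `sudo`) is to build the site and sync the `public` directory over:
```bash
zola build
sudo rsync -a --delete public/ /www/blog/
```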
### Step 7 (Optional): Set Up a CDN
Using Cloudflare as a CDN improves performance and security. Configure a DNS record and enable Cloudflare proxying to benefit from caching
and DDoS protection.
### Step 8: Automate Deployment with CI/CD
To automate deployments with Gitea, create `.gitea/workflows/deploy.yaml`:
```yaml
name: Deploy
on:
push:
branches:
- main
jobs:
build-and-test:
runs-on: ubuntu-latest
steps:
- name: Check out repository code
uses: actions/checkout@v4
with:
submodules: true
- name: Check 🔍
uses: zolacti/on@check
with:
drafts: true
- name: Build 🛠
uses: zolacti/on@build
- name: Deploy 🚀
uses: appleboy/scp-action@v0.1.7
with:
host: ${{ vars.ATLAS_SSH_HOST }}
username: ${{ vars.ATLAS_SSH_USERNAME }}
key: ${{ secrets.ATLAS_SSH_KEY }}
port: ${{ vars.ATLAS_SSH_PORT }}
source: public
target: /www/blog
rm: true
overwrite: true
strip_components: 1
```
Set these environment variables in Gitea Actions:
- `ATLAS_SSH_HOST`
- `ATLAS_SSH_USERNAME`
- `ATLAS_SSH_PORT`
And add the secret key:
- `ATLAS_SSH_KEY`
These credentials enable secure deployment via SCP.
## Conclusion
You now have a fully self-hosted website powered by Zola and Caddy. With automated CI/CD using Gitea, you can focus on writing content while
Gitea handles deployment. Enjoy your self-hosted blog!

View File

@ -1,13 +0,0 @@
+++
title = "Infrastructure as code"
+++
```rust
pub async fn main() -> Result<(), Error> {
let mut infrastructure = Infrastructure::new();
dbg!(infrastructure);
}
```
This is [a link](https://example.com) to example.com.

View File

@ -1,7 +1,10 @@
 +++
-title = "About"
+title = "About Me"
 template = "info-page.html"
 path = "about"
 +++
-# About me
+I am Aldo Funes, a passionate engineer capable of putting together the most complex workflows, and enjoying every bit.
+
+I am a software engineer with a strong background in computer science and a passion for technology. I have experience in software development, data analysis, and machine learning. I am always looking for new challenges and opportunities to learn and grow.

View File

@ -1,12 +1,12 @@
 +++
 title = "Airtm"
-description = "A dollar wallet"
+description = "Airtm is the most connected digital wallet in the world, offering an integrated US Virtual Account for non-US citizens, direct withdrawals from partners like Payoneer and access to over 500+ payment methods to convert and withdraw funds to your local currency."
 weight = 1
 [taxonomies]
-tags = ["tag one", "tag 2", "third tag"]
+tags = ["typescript", "node.js", "event-driven"]
 [extra]
-local_image = "img/seedling.png"
+local_image = "img/airtm-logo.svg"
 link_to = "https://airtm.com"
 +++

View File

@ -0,0 +1,6 @@
+++
title = "LOVIS API"
url = "https://www.lovis.com"
weight = 1
description = "The API allows customers to integrate their systems with LOVIS EOS. The interface area consists of a GraphQL API and webhook notifications."
+++

View File

@ -0,0 +1,14 @@
+++
title = "E-Commerce integration with LOVIS EOS"
weight = 1
description = "Integration of LOVIS EOS with E-Commerce plaforms to automate inventory, logistics and invoicing."
[extra]
social_media_card = "img/social_cards/projects_lovls_ecommerce.jpg"
+++
Integration of LOVIS EOS with E-Commerce platforms to automate inventory, logistics and invoicing.
- Shopify: [beerhouse.mx](https://www.beerhouse.mx/)
- Mercado Libre: [La Liga de la Cerveza](https://www.mercadolibre.com.mx/perfil/LALIGADELACERVEZASDERL)
- Amazon Marketplace

View File

@ -0,0 +1,11 @@
+++
title = "End-to-end custom swimwear production system"
weight = 1
description = "Customers are able to design and purchase customized swimwear products. When an order is completed, the factory receives the information required to produce them. It is a popular product for teams and schools."
[taxonomies]
tags = ["react", "graphql", "nodejs"]
[extra]
link_to = "https://designer.qswimwear.com"
+++

View File

@ -1,11 +0,0 @@
+++
title = "tabi"
description = "A feature-rich modern Zola theme with first-class multi-language support."
weight = 1
[taxonomies]
tags = ["tag one", "tag 2", "third tag"]
[extra]
local_image = "img/seedling.png"
+++

View File

@ -56,6 +56,8 @@ function convert_filename_to_url() {
url="${url%%_index*}" url="${url%%_index*}"
fi fi
url=$(echo "$url" | sed -r 's/([0-9]{4}-[0-9]{2}-[0-9]{2}-)//') # Replace datetime.
# Return the final URL with a single trailing slash. # Return the final URL with a single trailing slash.
full_url="${lang_code}${url}" full_url="${lang_code}${url}"
echo "${full_url%/}/" echo "${full_url%/}/"

File diff suppressed because one or more lines are too long

View File

@ -56,6 +56,8 @@ function convert_filename_to_url() {
url="${url%%_index*}" url="${url%%_index*}"
fi fi
url=$(echo "$url" | sed -r 's/([0-9]{4}-[0-9]{2}-[0-9]{2}-)//') # Replace datetime.
# Return the final URL with a single trailing slash. # Return the final URL with a single trailing slash.
full_url="${lang_code}${url}" full_url="${lang_code}${url}"
echo "${full_url%/}/" echo "${full_url%/}/"

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg" width="200" height="49" viewBox="0 0 200 49" fill="none">
<path fill-rule="evenodd" clip-rule="evenodd" d="M11.1614 36.0133L22.7485 12.0771L29.121 25.2251L11.1614 36.0144V36.0133ZM47.2725 45.0204L39.119 28.2003L47.0625 23.4282L42.9898 16.8923L35.7478 21.2432L27.8122 4.8691C27.3264 3.86566 26.5852 3.03362 25.6692 2.46152C24.7463 1.88716 23.6873 1.59657 22.6055 1.62494C21.5226 1.65219 20.4772 1.99613 19.5827 2.61817C18.728 3.21297 18.0469 4.04388 17.6076 5.02462L1.70479 37.8771L1.66733 37.9657C0.609409 40.4607 1.30977 43.2871 3.40972 45.0011C5.47107 46.6834 8.31338 46.8264 10.528 45.3769L32.4934 32.1823L38.7161 45.0216H47.2725V45.0204ZM58.5602 1.62329H50.1873V44.9705H58.5602V1.62329ZM72.0408 21.8924H84.4777V21.8936C90.8812 21.8936 93.0362 18.5357 93.0362 14.7498C93.0362 10.9638 90.8812 7.8511 84.5392 7.8511H72.0408V21.8924ZM86.0791 1.62329C98.024 1.62329 101.841 7.97308 101.841 14.6888C101.841 19.7555 99.1938 25.0063 92.051 26.8383L103.318 44.9705H93.8358L83.3694 27.9373H72.0408V44.9705H63.6679V1.62329H86.0791ZM104.363 8.339H120.309V44.9705H128.684V8.339H144.63V1.62329H104.363V8.339ZM160.806 1.62329H148.431H148.43V44.9705H156.003V13.5898C156.003 11.6359 155.88 9.86487 155.695 8.339H155.941C156.187 9.50012 156.804 11.759 157.42 13.4069L169.857 44.9717H177.738L190.174 13.4069C190.852 11.7578 191.59 9.31598 191.776 8.339H192.022C191.837 9.92704 191.714 11.8188 191.714 13.5898V44.9705H199.534V1.62329H187.035L175.275 31.844C174.598 33.6748 174.228 34.7738 173.982 36.2398H173.859C173.64 34.9374 173.324 33.9725 172.824 32.444C172.762 32.2534 172.696 32.0539 172.628 31.844L160.806 1.62329Z" fill="#050505"></path>
</svg>

After

Width:  |  Height:  |  Size: 1.7 KiB

BIN
static/img/profile.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 343 KiB

BIN (4 additional binary image files, not shown: 46 KiB, 47 KiB, 75 KiB, 50 KiB)