Clean up shell init structure and add Zed editor config

- Remove .init/ directory and associated shell scripts
- Update .gitignore and .zshrc configurations
- Add Zed editor settings.json
This commit is contained in:
2025-11-04 01:17:11 +01:00
parent 7eca0bcc47
commit 3f3cfd54b9
16 changed files with 308 additions and 1409 deletions

231
.config/zed/settings.json Normal file
View File

@@ -0,0 +1,231 @@
// Zed settings
//
// For information on how to configure Zed, see the Zed
// documentation: https://zed.dev/docs/configuring-zed
//
// To see all of Zed's default settings without changing your
// custom settings, run `zed: open default settings` from the
// command palette (cmd-shift-p / ctrl-shift-p)
{
// UI & Theme
"ui_font_size": 16,
"buffer_font_size": 15,
"theme": {
"mode": "system",
"light": "One Light",
"dark": "One Dark"
},
// Universal LSP Configuration
// A single user-installed language server handles all languages; its MCP
// backends are passed as --mcp-server=<name>=<command>,<args...> tuples.
"lsp": {
"universal-lsp": {
"binary": {
"path": "/home/valknar/.local/bin/universal-lsp",
"arguments": [
"lsp",
"--mcp-server=filesystem=npx,-y,@modelcontextprotocol/server-filesystem,/home/valknar/Projects",
"--mcp-server=git=npx,-y,@modelcontextprotocol/server-git",
"--mcp-server=github=npx,-y,@github/github-mcp-server",
"--mcp-server=fetch=npx,-y,@modelcontextprotocol/server-fetch",
"--mcp-server=duckduckgo=npx,-y,@nickclyde/duckduckgo-mcp-server",
"--mcp-server=memory=npx,-y,@modelcontextprotocol/server-memory",
"--mcp-server=sequential=npx,-y,@modelcontextprotocol/server-sequential-thinking",
"--mcp-server=playwright=npx,-y,@microsoft/playwright-mcp",
"--mcp-server=filescope=npx,-y,@joshuarileydev/filescope-mcp",
"--mcp-server=in-memoria=npx,-y,@pi22by7/in-memoria"
]
}
},
// The stock language servers below are pinned to /bin/false so Zed never
// starts them; universal-lsp (above) serves these languages instead.
"rust-analyzer": {
"binary": {
"path": "/bin/false"
}
},
"basedpyright": {
"binary": {
"path": "/bin/false"
}
},
"ruff": {
"binary": {
"path": "/bin/false"
}
},
"gopls": {
"binary": {
"path": "/bin/false"
}
},
"vtsls": {
"binary": {
"path": "/bin/false"
}
},
"eslint": {
"binary": {
"path": "/bin/false"
}
},
"tailwindcss-language-server": {
"binary": {
"path": "/bin/false"
}
},
"json-language-server": {
"binary": {
"path": "/bin/false"
}
}
},
// LSP Features
"enable_language_server": true,
"show_completions_on_input": true,
"auto_signature_help": true,
// Inlay Hints
"inlay_hints": {
"enabled": true,
"show_type_hints": true,
"show_parameter_hints": true,
"show_other_hints": true
},
// Code Lens
"code_lens": {
"enabled": true
},
// Semantic Index
"semantic_index": {
"enabled": true
},
// Language-specific settings
// Every language is routed to universal-lsp, with format-on-save disabled.
"languages": {
"Bash": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"C": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"C++": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"C#": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"CSS": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"Go": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"HTML": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"Java": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"JavaScript": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"JSON": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"Kotlin": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"PHP": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"Python": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"Ruby": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"Rust": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"Scala": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"Svelte": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"TSX": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"TypeScript": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"YAML": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"TOML": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
},
"Markdown": {
"language_servers": ["universal-lsp"],
"format_on_save": "off"
}
},
// Editor Settings
"auto_save": "on_focus_change",
"tab_size": 2,
"soft_wrap": "editor_width",
"show_whitespaces": "selection",
"remove_trailing_whitespace_on_save": true,
"ensure_final_newline_on_save": true,
// Terminal
"terminal": {
"font_size": 14
},
// Git
"git": {
"enabled": true,
"inline_blame": {
"enabled": false
}
},
// Project Panel
"project_panel": {
"dock": "left"
},
// ACP Agent Configuration
// Note: ACP agent will connect to the MCP coordinator started by the LSP server
"agent_servers": {
"Universal LSP": {
"command": "/home/valknar/.local/bin/universal-lsp",
"args": ["acp"],
"env": {}
}
}
}

5
.gitignore vendored
View File

@@ -8,7 +8,6 @@
!.gitmodules
!README.md
!.editorconfig
!*.env
!.prettierrc
!.prettierignore
!package.json
@@ -36,6 +35,10 @@
!biome.json
!arty.yml
!/.config/
!/.config/zed/
!/.config/zed/settings.json
!/.github/
!/.github/**

View File

@@ -1,17 +0,0 @@
#!/bin/bash
# Shell aliases sourced by the interactive shell init.
# ri: reload the init scripts into the current shell.
alias ri='source ~/.init/init.sh'
# git
# g0: stage everything, then succeed only if both worktree and index are clean.
alias g0='git add . && git diff --quiet && git diff --cached --quiet'
# g1: collapse the entire history into a single root commit "A new start".
alias g1='git reset $(git commit-tree "HEAD^{tree}" -m "A new start")'
# g2: print the subject (first line) of the latest commit message.
alias g2='git log --format=%B -n 1 HEAD | head -n 1'
# rsync
# rs: rsync over ssh, running the remote side under sudo.
alias rs='rsync --rsync-path="sudo rsync" -avzhe ssh'
# serve static files
# ss: serve a directory (appended as argument) over HTTP on port 8000.
alias ss='python -m http.server 8000 -d'
# download youtube mp3
alias yt='yt-dlp -x --audio-format mp3'

View File

@@ -1,477 +0,0 @@
#!/usr/bin/env bash
#############################################################################
# GitHub Artifact Downloader
#
# Download and extract GitHub Actions artifacts with style
#
# Usage:
# artifact_github_download.sh <REPO> [OPTIONS]
#
# Arguments:
# REPO GitHub repository (owner/repo)
#
# Options:
# -n, --name NAME Artifact name to download (preselect)
# -o, --output DIR Output directory (default: current directory)
# -h, --help Show this help message
#
# Examples:
# artifact_github_download.sh valknarness/awesome
# artifact_github_download.sh valknarness/awesome -n awesome-database-latest
# artifact_github_download.sh valknarness/awesome -o ~/downloads
#############################################################################
set -euo pipefail
# ============================================================================
# Color Definitions
# ============================================================================
# Check if terminal supports colors
# Colors are enabled only when stdout is a TTY and tput is available;
# otherwise every color/format variable is the empty string so the log
# helpers below degrade to plain text.
if [[ -t 1 ]] && command -v tput >/dev/null 2>&1; then
COLORS=$(tput colors 2>/dev/null || echo 0)
if [[ $COLORS -ge 8 ]]; then
# Standard colors
RED=$(tput setaf 1)
GREEN=$(tput setaf 2)
YELLOW=$(tput setaf 3)
BLUE=$(tput setaf 4)
MAGENTA=$(tput setaf 5)
CYAN=$(tput setaf 6)
WHITE=$(tput setaf 7)
# Bright colors (indices 10-14); fall back to the standard color on
# terminals that only report 8 colors to tput.
BRIGHT_GREEN=$(tput setaf 10 2>/dev/null || tput setaf 2)
BRIGHT_YELLOW=$(tput setaf 11 2>/dev/null || tput setaf 3)
BRIGHT_BLUE=$(tput setaf 12 2>/dev/null || tput setaf 4)
BRIGHT_MAGENTA=$(tput setaf 13 2>/dev/null || tput setaf 5)
BRIGHT_CYAN=$(tput setaf 14 2>/dev/null || tput setaf 6)
# Text formatting
BOLD=$(tput bold)
DIM=$(tput dim 2>/dev/null || echo "")
RESET=$(tput sgr0)
else
RED="" GREEN="" YELLOW="" BLUE="" MAGENTA="" CYAN="" WHITE=""
BRIGHT_GREEN="" BRIGHT_YELLOW="" BRIGHT_BLUE="" BRIGHT_MAGENTA="" BRIGHT_CYAN=""
BOLD="" DIM="" RESET=""
fi
else
RED="" GREEN="" YELLOW="" BLUE="" MAGENTA="" CYAN="" WHITE=""
BRIGHT_GREEN="" BRIGHT_YELLOW="" BRIGHT_BLUE="" BRIGHT_MAGENTA="" BRIGHT_CYAN=""
BOLD="" DIM="" RESET=""
fi
# ============================================================================
# Logging Functions
# ============================================================================
# Logging helpers. Everything is written to stderr so that functions whose
# stdout is captured with $(...) can still show progress to the user.
# printf '%b' interprets backslash escapes exactly like `echo -e` did.
log_info() {
  printf '%b\n' "${BRIGHT_BLUE}${BOLD}${RESET} ${CYAN}$*${RESET}" >&2
}
log_success() {
  printf '%b\n' "${BRIGHT_GREEN}${BOLD}${RESET} ${GREEN}$*${RESET}" >&2
}
log_warning() {
  printf '%b\n' "${BRIGHT_YELLOW}${BOLD}${RESET} ${YELLOW}$*${RESET}" >&2
}
log_error() {
  printf '%b\n' "${RED}${BOLD}${RESET} ${RED}$*${RESET}" >&2
}
log_step() {
  printf '%b\n' "${BRIGHT_MAGENTA}${BOLD}${RESET} ${MAGENTA}$*${RESET}" >&2
}
log_header() {
  # Print the title boxed between two full-width rules.
  local title="$*"
  local width=${#title}
  local rule
  rule=$(printf '═%.0s' $(seq 1 "$width"))
  echo "" >&2
  printf '%b\n' "${BRIGHT_CYAN}${BOLD}${rule}${RESET}" >&2
  printf '%b\n' "${BRIGHT_CYAN}${BOLD}${RESET}${BOLD}${WHITE}${title}${RESET}${BRIGHT_CYAN}${BOLD}${RESET}" >&2
  printf '%b\n' "${BRIGHT_CYAN}${BOLD}${rule}${RESET}" >&2
  echo "" >&2
}
log_data() {
  # Print an indented "label: value" pair.
  local key="$1"
  local val="$2"
  printf '%b\n' " ${DIM}${key}:${RESET} ${BOLD}${val}${RESET}" >&2
}
# ============================================================================
# Helper Functions
# ============================================================================
check_dependencies() {
# Verify that gh, jq and unzip are installed; list every missing tool and
# exit 1 if any is absent.
local missing=()
if ! command -v gh &> /dev/null; then
missing+=("gh (GitHub CLI)")
fi
if ! command -v jq &> /dev/null; then
missing+=("jq")
fi
if ! command -v unzip &> /dev/null; then
missing+=("unzip")
fi
if [[ ${#missing[@]} -gt 0 ]]; then
log_error "Missing required dependencies:"
# NOTE(review): this bullet list goes to stdout while log_error goes to
# stderr — confirm whether it should also be redirected with >&2.
for dep in "${missing[@]}"; do
echo -e " ${RED}${RESET} ${dep}"
done
exit 1
fi
}
check_gh_auth() {
  # Abort with a hint when the GitHub CLI holds no stored credentials.
  if gh auth status &> /dev/null; then
    return 0
  fi
  log_error "Not authenticated with GitHub CLI"
  log_info "Run: ${BOLD}gh auth login${RESET}"
  exit 1
}
show_help() {
# Print the usage text. The heredoc delimiter is unquoted on purpose so
# color variables and $(basename "$0") expand inside the help text.
cat << EOF
${BOLD}${BRIGHT_CYAN}GitHub Artifact Downloader${RESET}
${BOLD}USAGE:${RESET}
$(basename "$0") ${CYAN}<REPO>${RESET} [${YELLOW}OPTIONS${RESET}]
${BOLD}ARGUMENTS:${RESET}
${CYAN}REPO${RESET} GitHub repository (${DIM}owner/repo${RESET})
${BOLD}OPTIONS:${RESET}
${YELLOW}-n, --name NAME${RESET} Artifact name to download (preselect)
${YELLOW}-o, --output DIR${RESET} Output directory (default: current directory)
${YELLOW}-h, --help${RESET} Show this help message
${BOLD}EXAMPLES:${RESET}
${DIM}# Interactive mode - list and select artifacts${RESET}
$(basename "$0") valknarness/awesome
${DIM}# Preselect artifact by name${RESET}
$(basename "$0") valknarness/awesome -n awesome-database-latest
${DIM}# Download to specific directory${RESET}
$(basename "$0") valknarness/awesome -o ~/downloads
${DIM}# Combine options${RESET}
$(basename "$0") valknarness/awesome -n awesome-database-latest -o ~/downloads
EOF
}
format_size() {
  # Render a byte count as a short human-readable size (B/KB/MB/GB).
  # KB/MB use one decimal place, GB uses two, bytes are printed verbatim.
  local b=$1
  if (( b >= 1073741824 )); then
    awk -v b="$b" 'BEGIN {printf "%.2fGB", b/1073741824}'
  elif (( b >= 1048576 )); then
    awk -v b="$b" 'BEGIN {printf "%.1fMB", b/1048576}'
  elif (( b >= 1024 )); then
    awk -v b="$b" 'BEGIN {printf "%.1fKB", b/1024}'
  else
    echo "${b}B"
  fi
}
format_date() {
# Reformat an ISO-8601 timestamp ($1) as "YYYY-MM-DD HH:MM:SS".
# Falls back to printing the input unchanged if conversion fails.
local iso_date="$1"
if command -v date &> /dev/null; then
# `date --version` succeeds only on GNU date; BSD/macOS date rejects it,
# which is how the two incompatible CLIs are told apart.
if date --version &> /dev/null 2>&1; then
# GNU date
date -d "$iso_date" "+%Y-%m-%d %H:%M:%S" 2>/dev/null || echo "$iso_date"
else
# BSD date (macOS)
date -j -f "%Y-%m-%dT%H:%M:%SZ" "$iso_date" "+%Y-%m-%d %H:%M:%S" 2>/dev/null || echo "$iso_date"
fi
else
echo "$iso_date"
fi
}
# ============================================================================
# Main Functions
# ============================================================================
list_artifacts() {
# Fetch the artifact list for $1 (owner/repo) from the GitHub API and
# print the raw JSON on stdout (captured by the caller). All diagnostics
# go to stderr via the log helpers. Exits non-zero on API failure, zero
# (after a warning) when the repository simply has no artifacts.
local repo="$1"
log_step "Fetching artifacts from ${BOLD}${repo}${RESET}..."
# First check if there are any artifacts using gh's built-in jq
local count
count=$(gh api \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
--jq '.artifacts | length' \
"/repos/${repo}/actions/artifacts?per_page=100" 2>/dev/null)
if [[ -z "$count" ]]; then
log_error "Failed to fetch artifacts from repository"
log_info "Please check that:"
echo " • The repository ${BOLD}${repo}${RESET} exists and you have access"
echo " • GitHub Actions is enabled for this repository"
exit 1
fi
if [[ "$count" -eq 0 ]]; then
log_warning "No artifacts found in repository ${BOLD}${repo}${RESET}"
log_info "This repository may not have any workflow runs that produced artifacts"
exit 0
fi
# Now fetch the full JSON response
# NOTE(review): this issues the same API call twice (count probe + full
# fetch); a single fetch with a local jq length check would halve traffic.
local artifacts_json
artifacts_json=$(gh api \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
"/repos/${repo}/actions/artifacts?per_page=100" 2>/dev/null)
echo "$artifacts_json"
}
select_artifact() {
  # Pick one artifact, either by name ($2, preselect) or interactively.
  # Prints the selected "id|name|size|created|workflow" record on stdout;
  # the caller captures stdout with $(...), so EVERY human-facing line
  # (menu, prompt, errors) must go to stderr. The original wrote the menu
  # and prompt to stdout, which both hid them from the user and corrupted
  # the captured result.
  local artifacts_json="$1"
  local preselect_name="$2"
  # Flatten artifacts into pipe-separated records.
  local artifacts
  artifacts=$(echo "$artifacts_json" | jq -r '.artifacts[] | "\(.id)|\(.name)|\(.size_in_bytes)|\(.created_at)|\(.workflow_run.id)"')
  # If a preselect name is provided, find the matching artifact.
  if [[ -n "$preselect_name" ]]; then
    local selected
    # `|| true` keeps a no-match grep from aborting the script under
    # `set -euo pipefail` before the friendly error below can run.
    selected=$(echo "$artifacts" | grep -F "|${preselect_name}|" | head -1 || true)
    if [[ -z "$selected" ]]; then
      log_error "Artifact '${BOLD}${preselect_name}${RESET}' not found"
      log_info "Available artifacts:"
      echo "$artifacts" | while IFS='|' read -r id name size created workflow; do
        echo " ${CYAN}${RESET} ${name}" >&2
      done
      exit 1
    fi
    echo "$selected"
    return 0
  fi
  # Interactive selection: list everything on stderr.
  log_info "Available artifacts:"
  echo "" >&2
  local i=1
  local -a artifact_array
  while IFS='|' read -r id name size created workflow; do
    artifact_array+=("$id|$name|$size|$created|$workflow")
    local formatted_size=$(format_size "$size")
    local formatted_date=$(format_date "$created")
    printf " ${BOLD}${YELLOW}[%2d]${RESET} ${BRIGHT_CYAN}%s${RESET}\n" "$i" "$name" >&2
    printf " ${DIM}Size: ${RESET}%s ${DIM}Created: ${RESET}%s\n" "$formatted_size" "$formatted_date" >&2
    echo "" >&2
    ((i++))
  done <<< "$artifacts"
  # Prompt until a number in range is entered. (The original prompt
  # contained a corrupted UTF-8 byte sequence; replaced with '>'.)
  local selection
  while true; do
    echo -n -e "${BRIGHT_MAGENTA}${BOLD}>${RESET} ${MAGENTA}Select artifact [1-$((i-1))]:${RESET} " >&2
    read -r selection
    if [[ "$selection" =~ ^[0-9]+$ ]] && [[ "$selection" -ge 1 ]] && [[ "$selection" -lt "$i" ]]; then
      break
    else
      log_warning "Invalid selection. Please enter a number between 1 and $((i-1))"
    fi
  done
  echo "${artifact_array[$((selection-1))]}"
}
download_artifact() {
  # Download one artifact's zip archive into $4 (output dir).
  # Prints the path of the downloaded zip on stdout (captured by the
  # caller); progress messages go to stderr via the log helpers.
  local repo="$1"
  local artifact_id="$2"
  local artifact_name="$3"
  local output_dir="$4"
  log_step "Downloading artifact ${BOLD}${artifact_name}${RESET}..."
  # Create output directory if it doesn't exist
  mkdir -p "$output_dir"
  # Download artifact using gh
  local zip_file="${output_dir}/${artifact_name}.zip"
  if gh api \
    -H "Accept: application/vnd.github+json" \
    -H "X-GitHub-Api-Version: 2022-11-28" \
    "/repos/${repo}/actions/artifacts/${artifact_id}/zip" \
    > "$zip_file" 2>/dev/null; then
    log_success "Downloaded to ${BOLD}${zip_file}${RESET}"
    echo "$zip_file"
  else
    # The redirection already created the file; remove the empty/partial
    # zip so a failed run leaves no debris behind.
    rm -f -- "$zip_file"
    log_error "Failed to download artifact"
    exit 1
  fi
}
extract_artifact() {
  # Unzip $1 into a sibling directory and delete the zip afterwards.
  # Prints the extraction directory on stdout (captured by the caller via
  # $(...)); the per-file listing therefore must go to stderr — in the
  # original it went to stdout and contaminated the captured path.
  local zip_file="$1"
  local output_dir="$2"
  log_step "Extracting archive..."
  # Create extraction directory
  local extract_dir="${output_dir}/$(basename "$zip_file" .zip)"
  mkdir -p "$extract_dir"
  if unzip -q "$zip_file" -d "$extract_dir"; then
    log_success "Extracted to ${BOLD}${extract_dir}${RESET}"
    # Show extracted files (stderr: stdout is reserved for the dir path).
    log_info "Extracted files:"
    find "$extract_dir" -type f -exec basename {} \; | while read -r file; do
      echo " ${GREEN}${RESET} ${file}" >&2
    done
    # Remove zip file
    rm "$zip_file"
    log_info "Cleaned up zip file"
    echo "$extract_dir"
  else
    log_error "Failed to extract archive"
    exit 1
  fi
}
# ============================================================================
# Main Script
# ============================================================================
main() {
# Entry point: parse CLI arguments, validate them, then run the pipeline
# list -> select -> download -> extract. Helper functions print their
# results on stdout and their progress on stderr, which is why every
# result below is captured with $(...).
local repo=""
local artifact_name=""
local output_dir="."
# Parse arguments
while [[ $# -gt 0 ]]; do
case $1 in
-h|--help)
show_help
exit 0
;;
-n|--name)
artifact_name="$2"
shift 2
;;
-o|--output)
output_dir="$2"
shift 2
;;
-*)
log_error "Unknown option: $1"
echo ""
show_help
exit 1
;;
*)
# First bare argument is the repo; any further one is an error.
if [[ -z "$repo" ]]; then
repo="$1"
else
log_error "Unexpected argument: $1"
echo ""
show_help
exit 1
fi
shift
;;
esac
done
# Validate required arguments
if [[ -z "$repo" ]]; then
log_error "Repository argument is required"
echo ""
show_help
exit 1
fi
# Validate repository format
if [[ ! "$repo" =~ ^[^/]+/[^/]+$ ]]; then
log_error "Invalid repository format. Expected: ${BOLD}owner/repo${RESET}"
exit 1
fi
# Show header
log_header "GitHub Artifact Downloader"
# Check dependencies
log_step "Checking dependencies..."
check_dependencies
log_success "All dependencies found"
# Check GitHub authentication
log_step "Checking GitHub authentication..."
check_gh_auth
log_success "Authenticated with GitHub"
echo ""
log_data "Repository" "${BRIGHT_CYAN}${repo}${RESET}"
if [[ -n "$artifact_name" ]]; then
log_data "Artifact" "${BRIGHT_YELLOW}${artifact_name}${RESET}"
fi
log_data "Output" "${BRIGHT_GREEN}${output_dir}${RESET}"
echo ""
# List artifacts
local artifacts_json
artifacts_json=$(list_artifacts "$repo")
# Select artifact
local selected
selected=$(select_artifact "$artifacts_json" "$artifact_name")
# Split the "id|name|size|created|workflow" record into fields.
IFS='|' read -r artifact_id name size created workflow <<< "$selected"
echo ""
log_info "Selected artifact:"
log_data " Name" "${BRIGHT_CYAN}${name}${RESET}"
log_data " Size" "$(format_size "$size")"
log_data " Created" "$(format_date "$created")"
echo ""
# Download artifact
local zip_file
zip_file=$(download_artifact "$repo" "$artifact_id" "$name" "$output_dir")
# Extract artifact
local extract_dir
extract_dir=$(extract_artifact "$zip_file" "$output_dir")
# Success summary
echo ""
log_header "Download Complete!"
log_data "Location" "${BOLD}${extract_dir}${RESET}"
echo ""
log_success "All done! 🎉"
}
# Run main function
main "$@"

View File

@@ -1,592 +0,0 @@
#!/usr/bin/env bash
# mime_mp4_gif.sh - Advanced MP4 to Animated GIF converter
# Converts MP4 videos to GIFs with sophisticated keyframe extraction,
# interpolation algorithms, and scheduling distributions
set -euo pipefail
# Default values (overridden by the CLI options parsed in parse_arguments)
KEYFRAMES=10
INPUT_SCHEDULES=1
TRANSITION="linear"
SCHEDULE="uniform"
MAGIC="none"
# Per-frame display time in milliseconds (GIF delay).
KEYFRAME_DURATION=100
INPUT_FILE=""
OUTPUT_FILE=""
VERBOSE=false
# Color codes for output (raw ANSI escapes, interpreted by echo -e)
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Available algorithms — these arrays are the allow-lists checked by
# validate_enum when parsing -t/-s/-m.
TRANSITIONS=("linear" "sinoid" "cubic" "quadratic" "exponential" "bounce" "elastic")
SCHEDULES=("uniform" "front-load" "back-load" "center-peak" "edge-peak" "fibonacci" "golden-ratio")
MAGICS=("none" "psychedelic" "dither-bloom" "edge-glow" "temporal-blur" "chromatic-shift" "vaporwave")
#############################################################################
# Helper Functions
#############################################################################
print_usage() {
# Print the usage/help text. The unquoted EOF delimiter lets the
# ${TRANSITIONS[*]}-style expansions render the live allow-lists.
cat << EOF
Usage: $(basename "$0") [OPTIONS] INPUT_FILE [OUTPUT_FILE]
Convert MP4 videos to animated GIFs with advanced frame extraction algorithms.
Arguments:
INPUT_FILE Input MP4 video file (required)
OUTPUT_FILE Output GIF file (optional, defaults to INPUT_FILE.gif)
Options:
-k, --keyframes N Number of keyframes to extract (default: 10)
-d, --keyframe-duration MS Duration of each frame in milliseconds (default: 100)
Valid range: 1-30000 ms
Lower values = faster animation
Higher values = slower animation
-i, --input-schedules N Number of input schedules (default: 1)
1 schedule = entire video duration
N schedules = divide video into N segments
-t, --transition TYPE Interpolation function for frame timing
Available: ${TRANSITIONS[*]}
(default: linear)
-s, --schedule TYPE Algorithm to distribute keyframes across schedules
Available: ${SCHEDULES[*]}
(default: uniform)
-m, --magic TYPE Apply magical effects to the GIF
Available: ${MAGICS[*]}
(default: none)
-v, --verbose Enable verbose output
-h, --help Show this help message
Examples:
# Basic conversion with 15 keyframes
$(basename "$0") -k 15 video.mp4
# Fast animation with 50ms per frame
$(basename "$0") -k 20 -d 50 video.mp4
# Slow animation with 500ms per frame
$(basename "$0") -k 10 -d 500 video.mp4
# Use sinusoidal transition with center-peak distribution
$(basename "$0") -t sinoid -s center-peak -k 20 video.mp4
# Apply psychedelic magic with fibonacci distribution
$(basename "$0") -m psychedelic -s fibonacci -k 13 video.mp4 trippy.gif
# Complex: 3 schedules with cubic interpolation and edge glow
$(basename "$0") -i 3 -t cubic -s front-load -m edge-glow -k 30 video.mp4
EOF
}
# Logging helpers: informational output on stdout, errors on stderr.
# printf '%b' interprets the \033 escapes in the color variables exactly
# like `echo -e` did.
log_info() {
  printf '%b\n' "${BLUE}[INFO]${NC} $*"
}
log_success() {
  printf '%b\n' "${GREEN}[SUCCESS]${NC} $*"
}
log_warning() {
  printf '%b\n' "${YELLOW}[WARNING]${NC} $*"
}
log_error() {
  printf '%b\n' "${RED}[ERROR]${NC} $*" >&2
}
verbose_log() {
  # Forward to log_info only when --verbose was requested.
  if [[ "$VERBOSE" != "true" ]]; then
    return 0
  fi
  log_info "$@"
}
validate_enum() {
  # Return 0 iff $1 exactly matches one element of the array named by $2
  # (resolved through a bash nameref), 1 otherwise.
  local needle="$1"
  local -n haystack="$2"
  local candidate
  for candidate in "${haystack[@]}"; do
    if [[ "$candidate" == "$needle" ]]; then
      return 0
    fi
  done
  return 1
}
#############################################################################
# Mathematical Functions
#############################################################################
# Map a linear progress value through an easing curve.
# $1: progress in [0, 1]; $2: curve name (falls back to linear).
# Prints the eased value in [0, 1] on stdout. Math is delegated to awk
# because bash has no floating point.
calculate_transition() {
  local prog="$1"
  local curve="$2"
  case "$curve" in
    linear)
      # Identity mapping.
      echo "$prog"
      ;;
    sinoid)
      # Smooth sinusoidal easing
      awk -v p="$prog" 'BEGIN { print (1 - cos(p * 3.14159265359)) / 2 }'
      ;;
    cubic)
      # Cubic easing in-out
      awk -v p="$prog" 'BEGIN {
      if (p < 0.5)
      print 4 * p * p * p;
      else
      print 1 - ((-2 * p + 2) ^ 3) / 2;
      }'
      ;;
    quadratic)
      # Quadratic easing
      awk -v p="$prog" 'BEGIN {
      if (p < 0.5)
      print 2 * p * p;
      else
      print 1 - ((-2 * p + 2) ^ 2) / 2;
      }'
      ;;
    exponential)
      # Exponential easing
      awk -v p="$prog" 'BEGIN {
      if (p == 0) print 0;
      else if (p == 1) print 1;
      else if (p < 0.5) print (2 ^ (20 * p - 10)) / 2;
      else print (2 - (2 ^ (-20 * p + 10))) / 2;
      }'
      ;;
    bounce)
      # Bouncing effect
      awk -v p="$prog" 'BEGIN {
      n1 = 7.5625; d1 = 2.75;
      x = 1 - p;
      if (x < 1/d1) result = n1 * x * x;
      else if (x < 2/d1) { x -= 1.5/d1; result = n1 * x * x + 0.75; }
      else if (x < 2.5/d1) { x -= 2.25/d1; result = n1 * x * x + 0.9375; }
      else { x -= 2.625/d1; result = n1 * x * x + 0.984375; }
      print 1 - result;
      }'
      ;;
    elastic)
      # Elastic spring effect
      awk -v p="$prog" 'BEGIN {
      c4 = (2 * 3.14159265359) / 3;
      if (p == 0) print 0;
      else if (p == 1) print 1;
      else if (p < 0.5) print -(2 ^ (20 * p - 10) * sin((20 * p - 11.125) * c4)) / 2;
      else print (2 ^ (-20 * p + 10) * sin((20 * p - 11.125) * c4)) / 2 + 1;
      }'
      ;;
    *)
      # Unknown curve name: behave like linear.
      echo "$prog"
      ;;
  esac
}
# Fill the array named by $3 (via nameref) with monotonically increasing
# positions in [0, 1], one per keyframe, according to the schedule type.
# Callers guarantee num_frames >= 2 (enforced in parse_arguments), so the
# i/(n-1) divisions are safe.
generate_schedule_distribution() {
  local num_frames="$1"
  local schedule_type="$2"
  local -n result_array=$3
  case "$schedule_type" in
    uniform)
      # Evenly spaced positions.
      for ((i=0; i<num_frames; i++)); do
        result_array[$i]=$(awk -v i="$i" -v n="$num_frames" 'BEGIN { print i / (n - 1) }')
      done
      ;;
    front-load)
      # More frames at the beginning: t^2 compresses early positions.
      for ((i=0; i<num_frames; i++)); do
        local t=$(awk -v i="$i" -v n="$num_frames" 'BEGIN { print i / (n - 1) }')
        result_array[$i]=$(awk -v t="$t" 'BEGIN { print t * t }')
      done
      ;;
    back-load)
      # More frames at the end: mirror image of front-load.
      for ((i=0; i<num_frames; i++)); do
        local t=$(awk -v i="$i" -v n="$num_frames" 'BEGIN { print i / (n - 1) }')
        result_array[$i]=$(awk -v t="$t" 'BEGIN { print 1 - (1 - t) * (1 - t) }')
      done
      ;;
    center-peak)
      # More frames in the middle: the monotonic cubic 4*(t-0.5)^3 + 0.5
      # has near-zero slope at t = 0.5, so positions cluster around the
      # middle. (Bug fix: the old code computed a weight and immediately
      # overwrote it with t, silently degenerating to uniform.)
      for ((i=0; i<num_frames; i++)); do
        local t=$(awk -v i="$i" -v n="$num_frames" 'BEGIN { print i / (n - 1) }')
        result_array[$i]=$(awk -v t="$t" 'BEGIN { print 4 * (t - 0.5) * (t - 0.5) * (t - 0.5) + 0.5 }')
      done
      ;;
    edge-peak)
      # More frames at start and end: smoothstep t^2*(3-2t) has zero
      # slope at both endpoints, clustering positions near the edges.
      # (Same dead-code bug fixed as in center-peak.)
      for ((i=0; i<num_frames; i++)); do
        local t=$(awk -v i="$i" -v n="$num_frames" 'BEGIN { print i / (n - 1) }')
        result_array[$i]=$(awk -v t="$t" 'BEGIN { print t * t * (3 - 2 * t) }')
      done
      ;;
    fibonacci)
      # Cumulative Fibonacci weights normalized into (0, 1].
      local fib=(1 1)
      for ((i=2; i<num_frames; i++)); do
        fib[$i]=$((fib[i-1] + fib[i-2]))
      done
      local sum=0
      for val in "${fib[@]}"; do
        ((sum += val))
      done
      local cumsum=0
      for ((i=0; i<num_frames; i++)); do
        ((cumsum += fib[i]))
        result_array[$i]=$(awk -v c="$cumsum" -v s="$sum" 'BEGIN { print c / s }')
      done
      ;;
    golden-ratio)
      # Fractional parts of i*phi (a low-discrepancy sequence), then
      # sorted so the resulting timestamps stay monotonic.
      local phi=1.618033988749895
      for ((i=0; i<num_frames; i++)); do
        result_array[$i]=$(awk -v i="$i" -v phi="$phi" 'BEGIN {
        print ((i * phi) - int(i * phi))
        }')
      done
      IFS=$'\n' result_array=($(sort -n <<<"${result_array[*]}"))
      ;;
    *)
      # Unknown schedule: fall back to uniform spacing.
      for ((i=0; i<num_frames; i++)); do
        result_array[$i]=$(awk -v i="$i" -v n="$num_frames" 'BEGIN { print i / (n - 1) }')
      done
      ;;
  esac
}
#############################################################################
# Video Processing Functions
#############################################################################
get_video_duration() {
# Print the duration of $1 in (possibly fractional) seconds via ffprobe.
local file="$1"
ffprobe -v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 "$file"
}
extract_frames() {
# Extract one still per timestamp from $1 into $4 as frame_NNNN.png,
# scaled to 480px wide (height keeps aspect ratio). $3 names the
# timestamp array (nameref); $2 (duration) is currently unused here.
local input="$1"
local duration="$2"
local -n ts_ref=$3
local temp_dir="$4"
verbose_log "Extracting ${#ts_ref[@]} frames from video..."
for i in "${!ts_ref[@]}"; do
local time="${ts_ref[$i]}"
verbose_log " Frame $((i+1)): ${time}s"
# -ss before -i seeks on the input for fast keyframe-accurate seeking.
ffmpeg -v quiet -ss "$time" -i "$input" -vframes 1 \
-vf "scale=480:-1:flags=lanczos" \
"${temp_dir}/frame_$(printf "%04d" "$i").png" 2>/dev/null
done
}
apply_magic_effects() {
# Post-process every extracted frame in $2 with the ffmpeg filter chain
# selected by $1; "none" is a no-op. Each frame is rewritten in place via
# a .tmp.png intermediate.
local magic_type="$1"
local temp_dir="$2"
if [[ "$magic_type" == "none" ]]; then
return 0
fi
verbose_log "Applying magic effect: $magic_type"
case "$magic_type" in
psychedelic)
# Saturation boost with a time-oscillating hue rotation.
for frame in "$temp_dir"/*.png; do
ffmpeg -v quiet -i "$frame" -vf "hue=s=3:h=sin(2*PI*t)*360" \
"${frame}.tmp.png" 2>/dev/null && mv "${frame}.tmp.png" "$frame"
done
;;
dither-bloom)
# XOR-blend the frame with its negative, then add temporal noise.
for frame in "$temp_dir"/*.png; do
ffmpeg -v quiet -i "$frame" -vf "format=gbrp,split[a][b],[a]negate[c],[b][c]blend=all_mode=xor,noise=alls=20:allf=t" \
"${frame}.tmp.png" 2>/dev/null && mv "${frame}.tmp.png" "$frame"
done
;;
edge-glow)
# Render a colored edge map, then additively blend it over the frame.
for frame in "$temp_dir"/*.png; do
ffmpeg -v quiet -i "$frame" -vf "edgedetect=low=0.1:high=0.3,negate,hue=s=2" \
"${temp_dir}/edges_$(basename "$frame")"
ffmpeg -v quiet -i "$frame" -i "${temp_dir}/edges_$(basename "$frame")" \
-filter_complex "[0:v][1:v]blend=all_mode=addition:all_opacity=0.5" \
"${frame}.tmp.png" 2>/dev/null && mv "${frame}.tmp.png" "$frame"
rm "${temp_dir}/edges_$(basename "$frame")"
done
;;
temporal-blur)
# Create motion blur effect by averaging each frame with its neighbors
# (clamped at the first/last frame).
local frames=("$temp_dir"/*.png)
for i in "${!frames[@]}"; do
local prev_idx=$((i > 0 ? i - 1 : 0))
local next_idx=$((i < ${#frames[@]} - 1 ? i + 1 : ${#frames[@]} - 1))
ffmpeg -v quiet -i "${frames[$prev_idx]}" -i "${frames[$i]}" -i "${frames[$next_idx]}" \
-filter_complex "[0:v][1:v][2:v]blend=all_mode=average" \
"${frames[$i]}.tmp.png" 2>/dev/null && mv "${frames[$i]}.tmp.png" "${frames[$i]}"
done
;;
chromatic-shift)
# Offset the red and blue channels horizontally in opposite directions.
for frame in "$temp_dir"/*.png; do
ffmpeg -v quiet -i "$frame" -vf "rgbashift=rh=5:bh=-5" \
"${frame}.tmp.png" 2>/dev/null && mv "${frame}.tmp.png" "$frame"
done
;;
vaporwave)
# Vintage curves plus a magenta hue shift and boosted saturation/contrast.
for frame in "$temp_dir"/*.png; do
ffmpeg -v quiet -i "$frame" -vf "curves=vintage,hue=h=300:s=1.5,eq=saturation=1.5:contrast=1.2" \
"${frame}.tmp.png" 2>/dev/null && mv "${frame}.tmp.png" "$frame"
done
;;
esac
}
create_gif() {
# Assemble the frame_*.png files in $1 into the animated GIF $2, using a
# two-pass palettegen/paletteuse pipeline for better colors. $3 is the
# per-frame display time in milliseconds.
local temp_dir="$1"
local output="$2"
local frame_delay="$3"
verbose_log "Creating animated GIF with ${frame_delay}ms per frame..."
# Convert milliseconds to centiseconds (GIF delay unit)
# NOTE: delay_cs is only reported in the verbose log below; the actual
# timing is driven by the -framerate value (fps).
local delay_cs
delay_cs=$(awk -v ms="$frame_delay" 'BEGIN { print int(ms / 10) }')
# Ensure minimum delay of 1 centisecond
if [[ $delay_cs -lt 1 ]]; then
delay_cs=1
fi
# Calculate input framerate (frames are read at this rate)
# For GIF delay, we want 1000ms / frame_delay fps
local fps
fps=$(awk -v ms="$frame_delay" 'BEGIN { printf "%.2f", 1000.0 / ms }')
verbose_log "Frame delay: ${delay_cs} centiseconds (${frame_delay}ms), FPS: ${fps}"
# Generate palette for better color quality
ffmpeg -v error -pattern_type glob -i "${temp_dir}/frame_*.png" \
-vf "scale=480:-1:flags=lanczos,palettegen=stats_mode=diff" \
-y "${temp_dir}/palette.png"
# Create GIF using palette with specified frame delay
ffmpeg -v error -framerate "$fps" -pattern_type glob -i "${temp_dir}/frame_*.png" -i "${temp_dir}/palette.png" \
-filter_complex "[0:v]scale=480:-1:flags=lanczos[scaled];[scaled][1:v]paletteuse=dither=bayer:bayer_scale=5" \
-gifflags +transdiff -y "$output"
}
#############################################################################
# Main Processing
#############################################################################
process_video() {
# Orchestrate the whole conversion: compute keyframe timestamps from the
# schedule + transition settings, extract frames, apply effects, and
# build the GIF. Reads the globals set by parse_arguments.
local input="$INPUT_FILE"
local output="$OUTPUT_FILE"
# Validate input file
if [[ ! -f "$input" ]]; then
log_error "Input file not found: $input"
exit 1
fi
# Get video duration
local duration
duration=$(get_video_duration "$input")
verbose_log "Video duration: ${duration}s"
# Calculate schedule duration
# NOTE(review): schedule_duration is computed and logged but never used
# in the timestamp mapping below — the INPUT_SCHEDULES segmentation looks
# unimplemented; confirm intent.
local schedule_duration
schedule_duration=$(awk -v d="$duration" -v s="$INPUT_SCHEDULES" 'BEGIN { print d / s }')
verbose_log "Schedule duration: ${schedule_duration}s (${INPUT_SCHEDULES} schedule(s))"
# Generate frame distribution
local -a distribution
generate_schedule_distribution "$KEYFRAMES" "$SCHEDULE" distribution
verbose_log "Using schedule: $SCHEDULE"
verbose_log "Using transition: $TRANSITION"
# Calculate actual timestamps with transition function
local -a timestamps
for i in "${!distribution[@]}"; do
local base_time="${distribution[$i]}"
local weighted_time
weighted_time=$(calculate_transition "$base_time" "$TRANSITION")
# Map to video duration considering input schedules
local actual_time
actual_time=$(awk -v w="$weighted_time" -v d="$duration" 'BEGIN { print w * d }')
# Ensure we don't exceed video duration
timestamps[$i]=$(awk -v t="$actual_time" -v d="$duration" 'BEGIN {
if (t > d) print d;
else print t;
}')
done
# Create temporary directory; the EXIT trap guarantees cleanup on any
# exit path (including errors under set -e).
local temp_dir
temp_dir=$(mktemp -d)
trap "rm -rf '$temp_dir'" EXIT
# Extract frames
extract_frames "$input" "$duration" timestamps "$temp_dir"
# Apply magic effects
apply_magic_effects "$MAGIC" "$temp_dir"
# Create GIF with specified frame duration
create_gif "$temp_dir" "$output" "$KEYFRAME_DURATION"
log_success "GIF created successfully: $output"
# Show file size
local size
size=$(du -h "$output" | cut -f1)
log_info "Output size: $size"
}
#############################################################################
# Command Line Parsing
#############################################################################
parse_arguments() {
# Parse CLI options into the script-level globals (KEYFRAMES, SCHEDULE,
# INPUT_FILE, ...), validating enum options against their allow-lists and
# numeric options against their documented ranges. Exits non-zero with a
# usage message on any invalid input.
while [[ $# -gt 0 ]]; do
case "$1" in
-k|--keyframes)
KEYFRAMES="$2"
shift 2
;;
-d|--keyframe-duration)
KEYFRAME_DURATION="$2"
shift 2
;;
-i|--input-schedules)
INPUT_SCHEDULES="$2"
shift 2
;;
-t|--transition)
TRANSITION="$2"
if ! validate_enum "$TRANSITION" TRANSITIONS; then
log_error "Invalid transition type: $TRANSITION"
log_error "Available: ${TRANSITIONS[*]}"
exit 1
fi
shift 2
;;
-s|--schedule)
SCHEDULE="$2"
if ! validate_enum "$SCHEDULE" SCHEDULES; then
log_error "Invalid schedule type: $SCHEDULE"
log_error "Available: ${SCHEDULES[*]}"
exit 1
fi
shift 2
;;
-m|--magic)
MAGIC="$2"
if ! validate_enum "$MAGIC" MAGICS; then
log_error "Invalid magic type: $MAGIC"
log_error "Available: ${MAGICS[*]}"
exit 1
fi
shift 2
;;
-v|--verbose)
VERBOSE=true
shift
;;
-h|--help)
print_usage
exit 0
;;
-*)
log_error "Unknown option: $1"
print_usage
exit 1
;;
*)
# Positional arguments: first is the input, second the output.
if [[ -z "$INPUT_FILE" ]]; then
INPUT_FILE="$1"
elif [[ -z "$OUTPUT_FILE" ]]; then
OUTPUT_FILE="$1"
else
log_error "Too many arguments"
print_usage
exit 1
fi
shift
;;
esac
done
# Validate required arguments
if [[ -z "$INPUT_FILE" ]]; then
log_error "Input file is required"
print_usage
exit 1
fi
# Set default output file: input name with its extension swapped for .gif
if [[ -z "$OUTPUT_FILE" ]]; then
OUTPUT_FILE="${INPUT_FILE%.*}.gif"
fi
# Validate numeric arguments
# KEYFRAMES >= 2 also guarantees the i/(n-1) divisions in the schedule
# generator never divide by zero.
if ! [[ "$KEYFRAMES" =~ ^[0-9]+$ ]] || [[ "$KEYFRAMES" -lt 2 ]]; then
log_error "Keyframes must be a positive integer >= 2"
exit 1
fi
if ! [[ "$KEYFRAME_DURATION" =~ ^[0-9]+$ ]] || [[ "$KEYFRAME_DURATION" -lt 1 ]] || [[ "$KEYFRAME_DURATION" -gt 30000 ]]; then
log_error "Keyframe duration must be an integer between 1 and 30000 milliseconds"
exit 1
fi
if ! [[ "$INPUT_SCHEDULES" =~ ^[0-9]+$ ]] || [[ "$INPUT_SCHEDULES" -lt 1 ]]; then
log_error "Input schedules must be a positive integer >= 1"
exit 1
fi
}
#############################################################################
# Entry Point
#############################################################################
main() {
# Entry point: parse/validate flags, echo the effective configuration,
# then run the conversion pipeline.
parse_arguments "$@"
log_info "Starting MP4 to GIF conversion..."
log_info "Configuration:"
log_info " Input: $INPUT_FILE"
log_info " Output: $OUTPUT_FILE"
log_info " Keyframes: $KEYFRAMES"
log_info " Frame Duration: ${KEYFRAME_DURATION}ms"
log_info " Schedules: $INPUT_SCHEDULES"
log_info " Transition: $TRANSITION"
log_info " Schedule: $SCHEDULE"
log_info " Magic: $MAGIC"
process_video
}
# Run main function
main "$@"

View File

@@ -1,13 +0,0 @@
#!/bin/bash
# Initialise interactive-shell tooling (prompt + version managers),
# but only for tools that are actually installed.
#
# Fixes: "2>&1 >/dev/null" (which sent stderr to the terminal, not to
# /dev/null) is now ">/dev/null 2>&1", and a stray "!" that negated the
# oh-my-posh command inside the substitution has been dropped.
if command -v oh-my-posh >/dev/null 2>&1; then
  eval "$(oh-my-posh init zsh --config="$HOME/worker.omp.json")"
fi
if command -v rbenv >/dev/null 2>&1; then
  eval "$(rbenv init - --no-rehash zsh)"
fi
if command -v pyenv >/dev/null 2>&1; then
  eval "$(pyenv init --path)"
fi

View File

@@ -1,5 +0,0 @@
#!/bin/bash
# Global environment variables shared by the shell-init fragments.

export NVM_DIR="${HOME}/.nvm"        # nvm installation root
export REPOS_DIR="${HOME}/repos"     # where local git checkouts live
export CHORE_CHORE="chore: chore"    # default commit message (see _home_push)

View File

@@ -1,116 +0,0 @@
#!/bin/bash
is_ssh() {
  # Report whether the parent process is an sshd session.
  # NOTE: returns 1 when under SSH and 0 otherwise (inverted relative to
  # the name); kept that way so existing callers are unaffected.
  # Fix: the original assigned IS_SSH but tested the never-set $_IS_SSH,
  # so the comparison could never match.
  local proc_name
  proc_name=$(head -1 "/proc/$PPID/status" | cut -f2)
  if [ "$proc_name" = "sshd-session" ]; then
    return 1
  else
    return 0
  fi
}
# Commit the entire working tree of the current repo and push it.
# $1 - commit message; defaults to $CHORE_CHORE ("chore: chore").
# $2, $3 - forwarded verbatim to `git push` (presumably remote and
#          branch; left unquoted so empty values disappear — TODO
#          confirm against callers before quoting).
_home_push() {
git add -A
git commit -m "${1:-$CHORE_CHORE}"
git push $2 $3
}
_home_pull() {
  # Sync the home-directory repo: stash local edits, pull, re-apply.
  # $1, $2 - forwarded verbatim to `git pull` (remote/branch).
  # Fix: the original guard `[ -n $(is_ssh) ]` was always true because
  # is_ssh prints nothing; use its exit status instead (is_ssh exits 1
  # when the parent is an sshd session).
  if ! is_ssh; then
    # Over SSH, restore .last_pwd first so the pull cannot conflict on it.
    git checkout -- "$HOME/.last_pwd"
  fi
  git stash
  git pull $1 $2
  git stash pop
}
_site_deploy_jekyll() {
  # Build the Jekyll site in ~/repos/$1 and rsync it to the static host.
  # Fix: abort when cd fails so `rm -rf _site` can never run in the
  # wrong directory.
  cd "$HOME/repos/$1" || return 1
  rm -rf _site
  JEKYLL_ENV=production bundle exec jekyll build
  rsync -avzhe ssh _site/ "pi@hive:$DOCKER_STORAGE_DIR/staticwebserver/hosts/$1/" --delete
  cd -
}
_site_deploy_nuxt() {
  # Generate the Nuxt site in ~/repos/$1 and rsync the static output.
  # Fix: abort when cd fails so `rm -rf .output` can never run in the
  # wrong directory.
  cd "$HOME/repos/$1" || return 1
  rm -rf .output
  npm run generate
  rsync -avzhe ssh .output/public/ "pi@hive:$DOCKER_STORAGE_DIR/staticwebserver/hosts/$1/" --delete
  cd -
}
_site_deploy_static() {
  # Rsync the src/ tree of ~/repos/$1 to the static host verbatim.
  # Fix: abort when cd fails instead of rsyncing from the wrong place.
  cd "$HOME/repos/$1" || return 1
  rsync -avzhe ssh src/ "pi@hive:$DOCKER_STORAGE_DIR/staticwebserver/hosts/$1/" --delete
  cd -
}
_site_run_jekyll() {
  # Serve the Jekyll site in ~/repos/$1 locally with live reload.
  # Fix: bail out when cd fails instead of serving the wrong directory.
  cd "$HOME/repos/$1" || return 1
  bundle exec jekyll serve --livereload
  cd -
}
_site_run_nuxt() {
  # Run the Nuxt dev server for ~/repos/$1.
  # Fix: bail out when cd fails instead of running npm in the wrong dir.
  cd "$HOME/repos/$1" || return 1
  npm run dev
  cd -
}
_site_run_static() {
  # Serve the src/ tree of ~/repos/$1 on http://localhost:8000.
  # Fix: bail out when cd fails instead of serving the wrong directory.
  cd "$HOME/repos/$1" || return 1
  python -m http.server 8000 -d src
  cd -
}
batch_file_sequence() {
  # Rename every *.$2 file in the current directory (glob order) to a
  # zero-padded sequence: $1-000.$2, $1-001.$2, ...
  # Fixes: the prefix was interpolated into the printf FORMAT string
  # (breaking on any '%' in $1); deprecated `let` replaced with POSIX
  # arithmetic; the literal pattern is skipped when nothing matches.
  a=0
  for i in *."$2"; do
    [ -e "$i" ] || continue
    new=$(printf '%s-%03d.%s' "$1" "$a" "$2")
    mv -i -- "$i" "$new"
    a=$((a + 1))
  done
}
batch_image_webp() {
  # Convert every jpg/jpeg/png under the current tree to webp.
  # Fix: each original is deleted only after its own conversion
  # succeeded (`-exec` acts as a predicate), instead of a blanket
  # second pass that removed sources even when mogrify had failed.
  find . -type f -regex ".*\.\(jpg\|jpeg\|png\)" \
    -exec mogrify -format webp {} \; -exec rm {} \; -print
}
batch_video_x264() {
  # Re-encode every .mp4 with libx264 (crf 24) into "<name>.mp4.mp4",
  # then rename each result over the original.
  # Fix: the rename no longer interpolates file names into inline
  # python source (quote/injection hazard); plain shell parameter
  # expansion strips the doubled suffix safely.
  find . -type f -regex ".*\.\(mp4\)" -exec ffmpeg -i {} -vcodec libx264 -crf 24 "{}.mp4" \; -print
  find . -type f -regex ".+mp4\.mp4" -exec sh -c 'mv -- "$1" "${1%.mp4}"' _ {} \; -print
}
_image_description() {
  # Print the EXIF ImageDescription field of image $1.
  # Fix: quote "$1" so paths containing spaces work.
  identify -verbose "$1" | grep ImageDescription | sed "s/ exif:ImageDescription: //"
}
# Optimise the PNGs in the current directory into a single sequenced webp
# named "$1.webp".
# NOTE(review): depends on helpers not visible here — i_x4 / i_x05 look
# like upscale/downscale steps writing into x4/ and x05/, and
# `_file_sequence` does not match the `batch_file_sequence` defined in
# this file; possibly a stale name — verify before relying on this.
# WARNING: removes every *.png plus the x4/ and x05/ work directories.
_image_optimize() {
i_x4 && cp -rf x4/* . && i_x05 && cp -rf x05/* . && _file_sequence $1 webp && mv $1-000.webp $1.webp
_image_description *.png
rm -rf *.png x4 x05
}
_video_optimize() {
  # Re-time video $1 (PTS * 1.25, i.e. slowed to 0.8x) at 24 fps,
  # writing "<basename-without-extension>.mp4" in the current directory.
  # Fix: quote the input path so names with spaces work; variables made
  # local so the function no longer leaks globals.
  local filename extension
  filename=$(basename -- "$1")
  extension="${filename##*.}"   # kept for parity with the original (unused)
  filename="${filename%.*}"
  ffmpeg -y -i "$1" -vf "setpts=1.25*PTS" -r 24 "$filename.mp4"
}
function _over_subdirs {
  # Source ./.env into the current shell, then run command $1 inside
  # every immediate subdirectory (including hidden ones).
  # Fix: the original iterated `$(find ...)`, which word-splits paths
  # containing spaces; a NUL-delimited read loop handles any name.
  # $1 is intentionally left unquoted so multi-word commands still work.
  local start_dir=$PWD
  . "$PWD/.env"
  local subdir
  while IFS= read -r -d '' subdir; do
    cd "$subdir" && $1
    cd "$start_dir" || return 1
  done < <(find . -mindepth 1 -maxdepth 1 -type d -print0)
}
_join_by() {
  # Join arguments 2..N with argument 1 as the separator, e.g.
  #   _join_by , a b c   ->   a,b,c
  local delim=${1-} head=${2-}
  # With fewer than two arguments there is nothing to print.
  shift 2 || return 0
  printf %s "$head" "${@/#/$delim}"
}

View File

View File

@@ -1,41 +0,0 @@
#!/bin/bash
# Source each shell-init fragment from ~/.init, in dependency order
# (path and exports first, session start-up last). Missing fragments
# are silently skipped.
# Fix: the original sourced source.sh twice (once after alias.sh and
# again after links.sh); the duplicate has been dropped.
for _init_part in path export alias source functions links eval trap start; do
  if [ -f "$HOME/.init/$_init_part.sh" ]; then
    . "$HOME/.init/$_init_part.sh"
  fi
done
unset _init_part

View File

@@ -1 +0,0 @@
#!/bin/bash

View File

@@ -1,67 +0,0 @@
#!/bin/bash
# Build PATH (and XDG_DATA_DIRS) from whichever optional tool
# directories exist on this machine. Order matters: personal bins are
# prepended (they take precedence), language/tool bins are appended.

[ -d "$HOME/bin" ] && export PATH="$HOME/bin:$PATH"
[ -d "$HOME/.local/bin" ] && export PATH="$HOME/.local/bin:$PATH"
[ -d "$HOME/.rvm/bin" ] && export PATH="$HOME/.rvm/bin:$PATH"
[ -d "$HOME/repos/flutter/bin" ] && export PATH="$HOME/repos/flutter/bin:$PATH"
[ -d "$HOME/.rbenv/bin" ] && export PATH="$PATH:$HOME/.rbenv/bin"

# pyenv needs its root exported in addition to its bin directory.
if [ -d "$HOME/.pyenv/bin" ]; then
  export PYENV_ROOT="$HOME/.pyenv"
  export PATH="$PYENV_ROOT/bin:$PATH"
fi

[ -d "$HOME/.cargo/bin" ] && export PATH="$PATH:$HOME/.cargo/bin"
[ -d "/opt/Upscayl/resources/bin" ] && export PATH="$PATH:/opt/Upscayl/resources/bin"
[ -d "/usr/local/go/bin" ] && export PATH="$PATH:/usr/local/go/bin"
[ -d "$HOME/go/bin" ] && export PATH="$PATH:$HOME/go/bin"
[ -d "$HOME/node_modules/.bin" ] && export PATH="$PATH:$HOME/node_modules/.bin"
[ -d "$HOME/miniconda3/bin" ] && export PATH="$PATH:$HOME/miniconda3/bin"

# Flatpak application data for desktop integration.
[ -d "$HOME/.local/share/flatpak/exports/share" ] && export XDG_DATA_DIRS="$XDG_DATA_DIRS:$HOME/.local/share/flatpak/exports/share"
[ -d "/var/lib/flatpak/exports/share" ] && export XDG_DATA_DIRS="$XDG_DATA_DIRS:/var/lib/flatpak/exports/share"

[ -d "$HOME/.init/bin" ] && export PATH="$PATH:$HOME/.init/bin"
[ -d "$HOME/Projects/kompose" ] && export PATH="$PATH:$HOME/Projects/kompose"

# Always expose the locally built universal-lsp binary.
export PATH="$PATH:/home/valknar/Projects/zed/universal-lsp/target/release"

View File

@@ -1,28 +0,0 @@
#!/bin/bash
# Source optional per-tool environment files when present; `-s` also
# skips empty files. Pulled in at shell start-up via init.sh.

# Only fires when this file is read by bash (BASH_VERSION is unset in
# other shells).
if [ -n "$BASH_VERSION" ]; then
# include .bashrc if it exists
if [ -f "$HOME/.bashrc" ]; then
. "$HOME/.bashrc"
fi
fi
# nvm itself, then its completion file (order matters).
# NOTE(review): assumes NVM_DIR was exported earlier (see export.sh).
if [ -s "$NVM_DIR/nvm.sh" ] ; then
. "$NVM_DIR/nvm.sh"
fi
if [ -s "$NVM_DIR/bash_completion" ] ; then
. "$NVM_DIR/bash_completion"
fi
if [ -s "$HOME/.rvm/scripts/rvm" ] ; then
. "$HOME/.rvm/scripts/rvm"
fi
# cargo's env script (typically adds ~/.cargo/bin to PATH — confirm).
if [ -s "$HOME/.cargo/env" ] ; then
. "$HOME/.cargo/env"
fi
# if [ -s "$HOME/.gvm/scripts/gvm" ]; then
# . "$HOME/.gvm/scripts/gvm"
# fi

View File

@@ -1,26 +0,0 @@
#!/bin/bash
# Commands run once at the start of an interactive session.

# Load default keys into the running ssh-agent; all output is silenced
# on purpose (no agent / no keys is not an error worth reporting here).
ssh-add &>/dev/null
# The block below is a retired agent-bootstrap implementation that
# started its own ssh-agent when none was running; kept for reference.
# SSH_ENV="$HOME/.ssh/agent-environment"
# function start_agent {
# echo "Initialising new SSH agent..."
# /usr/bin/ssh-agent | sed 's/^echo/#echo/' >"$SSH_ENV"
# echo succeeded
# chmod 600 "$SSH_ENV"
# . "$SSH_ENV" >/dev/null
# /usr/bin/ssh-add;
# }
# # Source SSH settings, if applicable
# if [ -f "$SSH_ENV" ]; then
# . "$SSH_ENV" >/dev/null
# #ps $SSH_AGENT_PID doesn't work under Cygwin
# ps -ef | grep $SSH_AGENT_PID | grep ssh-agent$ >/dev/null || {
# start_agent
# }
# else
# start_agent
# fi

View File

@@ -1,13 +0,0 @@
#!/bin/bash
# Empty zsh signal hooks: defining TRAPINT/TRAPQUIT/TRAPTERM/TRAPEXIT
# makes the interactive shell ignore those events.
# Fix: an empty "{ }" function body is a syntax error in bash (the
# shebang's interpreter); a ":" no-op keeps each function valid in both
# bash and zsh without changing what it does.
TRAPINT() {
  :
}
TRAPQUIT() {
  :
}
TRAPTERM() {
  :
}
TRAPEXIT() {
  :
}

85
.zshrc
View File

@@ -1,6 +1,75 @@
if [ -f "$HOME/.init/init.sh" ] ; then
. "$HOME/.init/init.sh"
# Prepend personal bin directories (they take precedence), then append
# optional per-language tool directories that exist on this machine.
[ -d "$HOME/bin" ] && export PATH="$HOME/bin:$PATH"
[ -d "$HOME/.local/bin" ] && export PATH="$HOME/.local/bin:$PATH"
[ -d "$HOME/.rbenv/bin" ] && export PATH="$PATH:$HOME/.rbenv/bin"
# pyenv needs its root exported in addition to its bin directory.
if [ -d "$HOME/.pyenv/bin" ]; then
  export PYENV_ROOT="$HOME/.pyenv"
  export PATH="$PYENV_ROOT/bin:$PATH"
fi
[ -d "$HOME/.cargo/bin" ] && export PATH="$PATH:$HOME/.cargo/bin"
[ -d "/usr/local/go/bin" ] && export PATH="$PATH:/usr/local/go/bin"
[ -d "$HOME/go/bin" ] && export PATH="$PATH:$HOME/go/bin"
[ -d "$HOME/node_modules/.bin" ] && export PATH="$PATH:$HOME/node_modules/.bin"
# Vestigial guard: BASH_VERSION is never set by zsh, so this block only
# fires if this rc file is sourced by bash — TODO confirm it is needed.
if [ -n "$BASH_VERSION" ]; then
# include .bashrc if it exists
if [ -f "$HOME/.bashrc" ]; then
. "$HOME/.bashrc"
fi
fi
# nvm itself, then its completion file (order matters).
# NOTE(review): relies on NVM_DIR being exported before this point — no
# export is visible in this section; verify.
if [ -s "$NVM_DIR/nvm.sh" ] ; then
. "$NVM_DIR/nvm.sh"
fi
if [ -s "$NVM_DIR/bash_completion" ] ; then
. "$NVM_DIR/bash_completion"
fi
if [ -s "$HOME/.rvm/scripts/rvm" ] ; then
. "$HOME/.rvm/scripts/rvm"
fi
# cargo's env script (typically adds ~/.cargo/bin to PATH — confirm).
if [ -s "$HOME/.cargo/env" ] ; then
. "$HOME/.cargo/env"
fi
# Export every KEY=VALUE line from ~/.env into the environment.
# Fix: dropped the useless `cat` (xargs reads the file directly).
# NOTE(review): xargs word-splits and strips quotes, so values with
# spaces will not survive — confirm ~/.env only holds simple pairs.
if [ -s "$HOME/.env" ] ; then
  export $(xargs < "$HOME/.env")
fi
# Initialise prompt and version managers only when installed.
# Fixes: ">/dev/null 2>&1" (the original "2>&1 >/dev/null" still sent
# stderr to the terminal) and a stray "!" negating the oh-my-posh
# command inside the substitution has been dropped.
if command -v oh-my-posh >/dev/null 2>&1; then
  eval "$(oh-my-posh init zsh --config="$HOME/worker.omp.json")"
fi
if command -v rbenv >/dev/null 2>&1; then
  eval "$(rbenv init - --no-rehash zsh)"
fi
if command -v pyenv >/dev/null 2>&1; then
  eval "$(pyenv init --path)"
fi
# Enable Powerlevel10k instant prompt. Should stay close to the top of ~/.zshrc.
# Initialization code that may require console input (password prompts, [y/n]
# confirmations, etc.) must go above this block; everything else may go below.
@@ -117,14 +186,6 @@ source $ZSH/oh-my-zsh.sh
[[ ! -f ~/.p10k.zsh ]] || source ~/.p10k.zsh
# _home_pull
alias g1='git reset $(git commit-tree "HEAD^{tree}" -m "A new start")'
# Return to the directory recorded by the previous session; the cd is
# silenced when the file or target directory is missing.
# Fix: quote the path given to cat and silence its stderr — the outer
# &>/dev/null never covered the command substitution.
cd "$HOME/$(cat "$HOME/.last_pwd" 2>/dev/null)" &>/dev/null
# pnpm
export PNPM_HOME="/home/valknar/.local/share/pnpm"
case ":$PATH:" in
  *":$PNPM_HOME:"*) ;;
  *) export PATH="$PNPM_HOME:$PATH" ;;
esac
# pnpm end
# Locally built universal-lsp binary for Zed.
export PATH="$PATH:/home/valknar/Projects/zed/universal-lsp/target/release"