massive updates

This commit is contained in:
Mark Randall Havens 2025-06-06 00:03:54 -05:00
parent a1d16f2903
commit 27a5f0ff1e
28 changed files with 3174 additions and 765 deletions

699
dev/publish_osf.sh Executable file
View file

@ -0,0 +1,699 @@
#!/usr/bin/env bash
set -uo pipefail
# === Constants and Paths ===
# NOTE(review): `set -e` is deliberately absent — failures are handled
# per-step via the error()/log() helpers below; confirm that is intentional.
BASEDIR="$(pwd)"                                  # repo root (assumes script is run from it)
OSF_YAML="$BASEDIR/osf.yaml"                      # publishing manifest consumed by all modes
GITFIELD_DIR="$BASEDIR/.gitfield"                 # working/cache directory
LOG_DIR="$GITFIELD_DIR/logs"                      # daily log files
SCAN_LOG_INIT="$GITFIELD_DIR/scan_log.json"       # written by --init
SCAN_LOG_PUSH="$GITFIELD_DIR/push_log.json"       # written by --push (cached project id)
TMP_JSON_TOKEN="$GITFIELD_DIR/tmp_token.json"     # scratch: token-check API response
TMP_JSON_PROJECT="$GITFIELD_DIR/tmp_project.json" # scratch: node API responses
TOKEN_PATH="$HOME/.local/gitfieldlib/osf.token"   # cached OSF personal access token
mkdir -p "$GITFIELD_DIR" "$LOG_DIR" "$(dirname "$TOKEN_PATH")"
# === Logging ===
log() {
  # Append a timestamped entry to the daily log file and mirror
  # ERROR/INFO lines (or everything when --verbose) to stderr.
  #   $1 - level string (ERROR, INFO, WARN, ...)
  #   $2 - message
  local level="$1" msg="$2"
  echo "[$(date -Iseconds)] [$level] $msg" >> "$LOG_DIR/gitfield_$(date +%Y%m%d).log"
  # ${VERBOSE:-false}: log() runs during dependency installation (require_yq)
  # BEFORE the CLI dispatcher assigns VERBOSE; with `set -u` a bare $VERBOSE
  # would abort the script on any non-ERROR/INFO level logged that early.
  if [[ "$level" == "ERROR" || "$level" == "INFO" || "${VERBOSE:-false}" == "true" ]]; then
    echo "[$(date -Iseconds)] [$level] $msg" >&2
  fi
}
error() {
  # Record a fatal condition via the shared logger, then abort the script.
  local reason="$1"
  log "ERROR" "$reason"
  exit 1
}
# === Dependency Check ===
require_yq() {
  # Ensure the Go implementation of yq (v4.x) is on PATH; the Python "yq"
  # wrapper has an incompatible CLI, hence the `version v4` probe.
  if ! command -v yq &>/dev/null || ! yq --version 2>/dev/null | grep -q 'version v4'; then
    log "INFO" "Installing 'yq' (Go version)..."
    YQ_BIN="/usr/local/bin/yq"
    ARCH=$(uname -m)
    case $ARCH in
      x86_64) ARCH=amd64 ;;
      aarch64) ARCH=arm64 ;;
      *) error "Unsupported architecture: $ARCH" ;;
    esac
    # Download to a temp file and verify curl succeeded (-f fails on HTTP
    # errors) before installing; the original chain ran `curl -s` unchecked
    # and could move a truncated/empty download into /usr/local/bin.
    local tmp
    tmp=$(mktemp) || error "mktemp failed"
    curl -fsL "https://github.com/mikefarah/yq/releases/download/v4.43.1/yq_linux_${ARCH}" -o "$tmp" \
      || error "Failed to download yq for $ARCH"
    chmod +x "$tmp" && sudo mv "$tmp" "$YQ_BIN" || error "Failed to install yq to $YQ_BIN"
    log "INFO" "'yq' installed to $YQ_BIN"
  fi
}
require_jq() {
  # Install jq via apt when it is not already on PATH; no-op otherwise.
  command -v jq &>/dev/null && return 0
  log "INFO" "Installing 'jq'..."
  sudo apt update && sudo apt install -y jq
  log "INFO" "'jq' installed"
}
# Install required tooling up front, before any YAML/JSON parsing happens.
require_yq
require_jq
# === Token Retrieval ===
get_token() {
  # Resolve OSF_TOKEN (environment > cached file > interactive prompt) and
  # verify it against the OSF API before anything else uses it.
  # Sets: OSF_TOKEN, RESPONSE, HTTP_CODE. Aborts on an invalid token.
  if [[ -z "${OSF_TOKEN:-}" ]]; then
    if [[ -f "$TOKEN_PATH" ]]; then
      OSF_TOKEN=$(<"$TOKEN_PATH")
    else
      echo -n "🔐 Enter your OSF_TOKEN: " >&2
      read -rs OSF_TOKEN
      echo >&2
      # Create the cache file with restrictive permissions BEFORE the secret
      # hits disk; the original wrote first and chmod'ed afterwards, leaving
      # the token world-readable for a moment.
      (umask 077 && printf '%s\n' "$OSF_TOKEN" > "$TOKEN_PATH")
      chmod 600 "$TOKEN_PATH"
      log "INFO" "Token saved to $TOKEN_PATH"
    fi
  fi
  # Cheap authenticated call; the body lands in $TMP_JSON_TOKEN, the status
  # code is appended to $RESPONSE by -w and split off with tail.
  RESPONSE=$(curl -s -w "\n%{http_code}" -o "$TMP_JSON_TOKEN" "https://api.osf.io/v2/users/me/" \
    -H "Authorization: Bearer $OSF_TOKEN")
  HTTP_CODE=$(echo "$RESPONSE" | tail -n 1)
  [[ "$HTTP_CODE" == "200" ]] || error "Invalid OSF token (HTTP $HTTP_CODE)"
}
# === Auto-Generate osf.yaml ===
# Scan the repository, classify publishable files by extension/location, and
# write a starter osf.yaml plus a JSON scan log. Never contacts the OSF API.
init_mode() {
log "INFO" "Scanning project directory..."
# Collect candidate files by extension, skipping git/gitfield state dirs.
mapfile -t ALL_FILES < <(find "$BASEDIR" -type f \( \
-name '*.md' -o -name '*.pdf' -o -name '*.tex' -o -name '*.csv' -o -name '*.txt' \
-o -name '*.rtf' -o -name '*.doc' -o -name '*.docx' -o -name '*.odt' \
-o -name '*.xls' -o -name '*.xlsx' -o -name '*.ods' -o -name '*.ppt' -o -name '*.pptx' \
-o -name '*.odp' -o -name '*.jpg' -o -name '*.jpeg' -o -name '*.png' -o -name '*.gif' \
-o -name '*.svg' -o -name '*.tiff' -o -name '*.bmp' -o -name '*.webp' \
-o -name '*.sh' -o -name '*.py' -o -name '*.rb' -o -name '*.pl' -o -name '*.js' \
-o -name '*.yaml' -o -name '*.yml' -o -name '*.json' -o -name '*.xml' \
-o -name 'LICENSE*' -o -name 'COPYING*' \
\) ! -path "*/.git/*" ! -path "*/.gitfield/*" ! -path "*/.legacy-gitfield/*" | sort -u)
# Filter out anything .gitignore'd: grep -vF treats each newline-separated
# ignored path as a literal pattern to exclude.
if [[ ${#ALL_FILES[@]} -gt 0 ]]; then
IGNORED_FILES=$(git check-ignore "${ALL_FILES[@]}" 2>/dev/null || true)
if [[ -n "$IGNORED_FILES" ]]; then
log "INFO" "Ignored files due to .gitignore: $IGNORED_FILES"
mapfile -t ALL_FILES < <(printf '%s\n' "${ALL_FILES[@]}" | grep -vF "$IGNORED_FILES" | sort -u)
fi
fi
[[ ${#ALL_FILES[@]} -gt 0 ]] || log "WARN" "No files detected in the repository!"
log "INFO" "Files detected: ${ALL_FILES[*]}"
# Nested helper: print the first file whose path contains any of the given
# keywords (case-insensitive). Prints nothing when no file matches.
detect_file() {
local keywords=("$@")
for file in "${ALL_FILES[@]}"; do
for kw in "${keywords[@]}"; do
if [[ "${file,,}" == *"${kw,,}"* ]]; then
echo "$file"
return 0
fi
done
done
}
WIKI_PATH=$(detect_file "wiki.md" "wiki" "home.md")
README_PATH=$(detect_file "readme.md" "README.md")
PAPER_PATH=$(detect_file "main.pdf" "theory.pdf" "paper.pdf" "manuscript.pdf")
# Classify the remaining files into the osf.yaml sections. A file claimed as
# wiki/readme/paper is excluded from every other bucket.
DOCS=()
ESSAYS=()
IMAGES=()
SCRIPTS=()
DATA=()
FILES=()
for f in "${ALL_FILES[@]}"; do
case "$f" in
"$WIKI_PATH"|"$README_PATH"|"$PAPER_PATH") continue ;;
esac
if [[ "$f" =~ \.(jpg|jpeg|png|gif|svg|tiff|bmp|webp)$ ]]; then
IMAGES+=("$f")
elif [[ "$f" =~ \.(sh|py|rb|pl|js)$ ]]; then
SCRIPTS+=("$f")
elif [[ "$f" =~ \.(csv|json|xml|yaml|yml)$ ]]; then
DATA+=("$f")
elif [[ "$f" =~ \.(md|pdf|tex|doc|docx|odt|xls|xlsx|ods|ppt|pptx|odp|txt|rtf)$ ]] || [[ "$(basename "$f")" =~ ^(LICENSE|COPYING) ]]; then
# Documents: path components and keyword heuristics split docs vs essays;
# anything else falls into the generic "files" bucket.
if [[ "$f" =~ /docs/ ]] || [[ "${f,,}" =~ (guide|tutorial|howto|manual|documentation|workflow|readme) ]]; then
DOCS+=("$f")
elif [[ "$f" =~ /essays/|/notes/ ]] || [[ "${f,,}" =~ (essay|note|draft|reflection) ]]; then
ESSAYS+=("$f")
else
FILES+=("$f")
fi
fi
done
log "INFO" "Generating osf.yaml..."
# Emit the manifest. Paths are written relative to BASEDIR (prefix stripped)
# so the file stays portable across checkouts.
{
echo "# osf.yaml - Configuration for publishing to OSF"
echo "# Generated on $(date -Iseconds)"
echo "# Edit this file to customize what gets published to OSF."
echo
echo "title: \"$(basename "$BASEDIR")\""
echo "description: \"Auto-generated by GitField OSF publisher on $(date -Iseconds)\""
echo "category: \"project\""
echo "public: false"
echo "tags: [gitfield, auto-generated]"
echo
echo "# Wiki: Main wiki page for your OSF project (wiki.md, home.md)."
if [[ -n "$WIKI_PATH" ]]; then
echo "wiki:"
echo " path: \"${WIKI_PATH#$BASEDIR/}\""
echo " overwrite: true"
else
echo "# wiki: Not found. Place a 'wiki.md' in your repository to auto-detect."
fi
echo
echo "# Readme: Main README file (readme.md, README.md)."
if [[ -n "$README_PATH" ]]; then
echo "readme:"
echo " path: \"${README_PATH#$BASEDIR/}\""
else
echo "# readme: Not found. Place a 'README.md' in your repository root."
fi
echo
echo "# Paper: Primary academic paper (main.pdf, paper.pdf)."
if [[ -n "$PAPER_PATH" ]]; then
echo "paper:"
echo " path: \"${PAPER_PATH#$BASEDIR/}\""
echo " name: \"$(basename "$PAPER_PATH")\""
else
echo "# paper: Not found. Place a PDF (e.g., 'main.pdf') in your repository."
fi
if ((${#DOCS[@]})); then
echo
echo "# Docs: Documentation files (.md, .pdf, etc.) in docs/ or with keywords like 'guide'."
echo "docs:"
for doc in "${DOCS[@]}"; do
relative_path="${doc#$BASEDIR/}"
echo " - path: \"$relative_path\""
echo " name: \"$relative_path\""
done
fi
if ((${#ESSAYS[@]})); then
echo
echo "# Essays: Written essays (.md, .pdf, etc.) in essays/ or with keywords like 'essay'."
echo "essays:"
for essay in "${ESSAYS[@]}"; do
relative_path="${essay#$BASEDIR/}"
echo " - path: \"$relative_path\""
echo " name: \"$relative_path\""
done
fi
if ((${#IMAGES[@]})); then
echo
echo "# Images: Image files (.jpg, .png, etc.)."
echo "images:"
for image in "${IMAGES[@]}"; do
relative_path="${image#$BASEDIR/}"
echo " - path: \"$relative_path\""
echo " name: \"$relative_path\""
done
fi
if ((${#SCRIPTS[@]})); then
echo
echo "# Scripts: Executable scripts (.sh, .py, etc.) in bin/, scripts/, or tools/."
echo "scripts:"
for script in "${SCRIPTS[@]}"; do
relative_path="${script#$BASEDIR/}"
echo " - path: \"$relative_path\""
echo " name: \"$relative_path\""
done
fi
if ((${#DATA[@]})); then
echo
echo "# Data: Structured data files (.csv, .yaml, etc.)."
echo "data:"
for datum in "${DATA[@]}"; do
relative_path="${datum#$BASEDIR/}"
echo " - path: \"$relative_path\""
echo " name: \"$relative_path\""
done
fi
if ((${#FILES[@]})); then
echo
echo "# Files: Miscellaneous files (.md, LICENSE, etc.)."
echo "files:"
for file in "${FILES[@]}"; do
relative_path="${file#$BASEDIR/}"
echo " - path: \"$relative_path\""
echo " name: \"$relative_path\""
done
fi
} > "$OSF_YAML"
log "INFO" "Wiki: $WIKI_PATH, Readme: $README_PATH, Paper: $PAPER_PATH"
log "INFO" "Docs: ${DOCS[*]}"
log "INFO" "Essays: ${ESSAYS[*]}"
log "INFO" "Images: ${IMAGES[*]}"
log "INFO" "Scripts: ${SCRIPTS[*]}"
log "INFO" "Data: ${DATA[*]}"
log "INFO" "Files: ${FILES[*]}"
# Machine-readable record of the scan (only docs/files/scripts are logged
# here; essays/images/data are visible in osf.yaml itself).
jq -n \
--argjson all "$(printf '%s\n' "${ALL_FILES[@]}" | jq -R . | jq -s .)" \
--argjson docs "$(printf '%s\n' "${DOCS[@]}" | jq -R . | jq -s .)" \
--argjson files "$(printf '%s\n' "${FILES[@]}" | jq -R . | jq -s .)" \
--argjson scripts "$(printf '%s\n' "${SCRIPTS[@]}" | jq -R . | jq -s .)" \
--arg osf_yaml "$OSF_YAML" \
'{detected_files: $all, classified: {docs: $docs, files: $files, scripts: $scripts}, osf_yaml_path: $osf_yaml}' > "$SCAN_LOG_INIT"
log "INFO" "Generated $OSF_YAML and scan log"
echo "✅ osf.yaml generated at $OSF_YAML." >&2
}
# === Generate Default Wiki with Links ===
generate_wiki() {
  # Create a default wiki page when osf.yaml names one that does not exist
  # on disk. The page embeds {NODE_ID} placeholder links that upload_wiki()
  # later rewrites to real OSF URLs. No-op when the file exists or no wiki
  # is configured.
  local wiki_path abs_wiki
  wiki_path=$(yq e '.wiki.path' "$OSF_YAML")
  [[ "$wiki_path" == "null" ]] && return 0
  # Anchor relative paths at BASEDIR for consistency with upload_wiki(),
  # which reads "$BASEDIR/$wiki_path"; the original check used a
  # cwd-relative path and only worked because cwd happened to be BASEDIR.
  if [[ "$wiki_path" = /* ]]; then
    abs_wiki="$wiki_path"
  else
    abs_wiki="$BASEDIR/$wiki_path"
  fi
  [[ -f "$abs_wiki" ]] && return 0
  log "INFO" "Generating default wiki at $wiki_path..."
  mkdir -p "$(dirname "$abs_wiki")"
  {
    echo "# Auto-Generated Wiki for $(yq e '.title' "$OSF_YAML")"
    echo
    echo "## Project Overview"
    echo "$(yq e '.description' "$OSF_YAML")"
    echo
    echo "## Repository Info"
    echo "- **Last Commit**: $(git log -1 --pretty=%B 2>/dev/null || echo "No git commits")"
    echo "- **Commit Hash**: $(git rev-parse HEAD 2>/dev/null || echo "N/A")"
    if [[ -f "$(yq e '.readme.path' "$OSF_YAML")" ]]; then
      echo
      echo "## README Preview"
      head -n 10 "$(yq e '.readme.path' "$OSF_YAML")"
    fi
    echo
    echo "## Internal Documents"
    echo "Links to documents uploaded to OSF (will be populated after --push/--overwrite):"
    local section count i name
    for section in docs essays images scripts data files; do
      count=$(yq e ".${section} | length" "$OSF_YAML")
      if [[ "$count" != "0" && "$count" != "null" ]]; then
        echo
        echo "### $(echo "$section" | tr '[:lower:]' '[:upper:]')"
        for ((i = 0; i < count; i++)); do
          name=$(yq e ".${section}[$i].name" "$OSF_YAML")
          echo "- [$name](https://osf.io/{NODE_ID}/files/osfstorage/$name)"
        done
      fi
    done
  } > "$abs_wiki"
  log "INFO" "Default wiki generated at $wiki_path"
}
# === Validate YAML ===
validate_yaml() {
  # Ensure osf.yaml exists (generating one via init_mode when absent) and
  # that the required top-level fields are present.
  log "INFO" "Validating $OSF_YAML..."
  if [[ ! -f "$OSF_YAML" ]]; then
    init_mode
  fi
  local required
  for required in title description category public; do
    if [[ "$(yq e ".$required" "$OSF_YAML")" == "null" ]]; then
      error "Missing field: $required in $OSF_YAML"
    fi
  done
}
# === Validate and Read push_log.json ===
read_project_id() {
  # Print the OSF project id cached by a previous push, or an empty string
  # when push_log.json is missing or not valid JSON.
  local node_id=""
  if [[ -f "$SCAN_LOG_PUSH" ]] && jq -e '.' "$SCAN_LOG_PUSH" >/dev/null 2>&1; then
    node_id=$(jq -r '.project_id // ""' "$SCAN_LOG_PUSH")
  else
    log "WARN" "No valid push_log.json found"
  fi
  echo "$node_id"
}
# === Search for Existing Project by Title ===
find_project_by_title() {
  # Query the OSF nodes endpoint for a project whose title matches $1 and
  # print its id (empty string when not found or on API failure). In dry-run
  # mode a synthetic id is printed instead of hitting the network.
  local title="$1"
  local encoded http_code node_id
  log "INFO" "Searching for project: $title"
  if [[ "$DRY_RUN" == "true" ]]; then
    echo "dry-run-$(uuidgen)"
    return
  fi
  encoded=$(jq -r -n --arg title "$title" '$title|@uri')
  RESPONSE=$(curl -s -w "\n%{http_code}" -o "$TMP_JSON_PROJECT" \
    "https://api.osf.io/v2/nodes/?filter[title]=$encoded" \
    -H "Authorization: Bearer $OSF_TOKEN")
  http_code=$(tail -n 1 <<<"$RESPONSE")
  if [[ "$http_code" != "200" ]]; then
    log "WARN" "Failed to search for project (HTTP $http_code)"
    echo ""
    return
  fi
  node_id=$(jq -r '.data[0].id // ""' "$TMP_JSON_PROJECT")
  [[ -n "$node_id" ]] && log "INFO" "Found project '$title': $node_id"
  echo "$node_id"
}
# === Upload Helpers ===
sanitize_filename() {
  # Normalize a filename for OSF storage: strip all newlines, then replace
  # every character outside [A-Za-z0-9._-] with an underscore.
  # Implemented with parameter expansion instead of `echo | tr | sed`:
  # `echo "$name"` swallowed flag-like names such as "-n" or "-e" entirely.
  local cleaned="${1//$'\n'/}"
  printf '%s\n' "${cleaned//[^[:alnum:]._-]/_}"
}
# Upload one file into the project's osfstorage root, honoring --overwrite
# and --dry-run. Relies on dynamically-scoped NODE_ID, MODE, DRY_RUN from
# push_mode and appends successful names to the caller's UPLOADED_FILES.
# Returns non-zero when the file exists without --overwrite or upload fails.
upload_file() {
local path="$1" name="$2"
local sanitized_name encoded_name
sanitized_name=$(sanitize_filename "$name")
encoded_name=$(jq -r -n --arg name "$sanitized_name" '$name|@uri')
log "INFO" "Uploading $name (sanitized: $sanitized_name) from $path"
if [[ "$DRY_RUN" == "true" ]]; then
return 0
fi
# Check whether a file with this name already exists in the project.
CHECK_URL="https://api.osf.io/v2/nodes/$NODE_ID/files/osfstorage/?filter[name]=$encoded_name"
RESPONSE=$(curl -s -w "\n%{http_code}" -o "$TMP_JSON_PROJECT" "$CHECK_URL" \
-H "Authorization: Bearer $OSF_TOKEN")
HTTP_CODE=$(echo "$RESPONSE" | tail -n 1)
if [[ -z "$HTTP_CODE" ]]; then
log "WARN" "No HTTP status for $sanitized_name check. Assuming file does not exist."
elif [[ "$HTTP_CODE" == "200" ]]; then
FILE_ID=$(jq -r '.data[0].id // ""' "$TMP_JSON_PROJECT")
if [[ -n "$FILE_ID" ]]; then
if [[ "$MODE" == "overwrite" ]]; then
# Overwrite is implemented as delete-then-reupload.
log "INFO" "Deleting existing file $sanitized_name (ID: $FILE_ID)..."
DEL_RESPONSE=$(curl -s -w "%{http_code}" -X DELETE "https://api.osf.io/v2/files/$FILE_ID/" \
-H "Authorization: Bearer $OSF_TOKEN")
[[ "$DEL_RESPONSE" == "204" ]] || log "WARN" "Failed to delete $sanitized_name (HTTP $DEL_RESPONSE)"
else
log "WARN" "File $sanitized_name exists. Use --overwrite to replace."
return 1
fi
fi
elif [[ "$HTTP_CODE" != "404" ]]; then
log "WARN" "Check for $sanitized_name failed (HTTP $HTTP_CODE)"
fi
# NOTE(review): -F sends a multipart/form-data body, but the Waterbutler
# PUT endpoint expects the raw file bytes (--data-binary @"$path") —
# verify uploads are not arriving wrapped in multipart boundaries.
UPLOAD_URL="https://files.osf.io/v1/resources/$NODE_ID/providers/osfstorage/?kind=file&name=$encoded_name"
RESPONSE=$(curl -s -w "\n%{http_code}" -X PUT "$UPLOAD_URL" \
-H "Authorization: Bearer $OSF_TOKEN" \
-F "file=@$path")
HTTP_CODE=$(echo "$RESPONSE" | tail -n 1)
if [[ "$HTTP_CODE" != "201" ]]; then
log "WARN" "Failed to upload $name (HTTP $HTTP_CODE)"
return 1
fi
echo "📤 Uploaded $name to https://osf.io/$NODE_ID/" >&2
UPLOADED_FILES+=("$name")
return 0
}
upload_group() {
  # Upload every entry of one osf.yaml list section (docs, essays, images,
  # scripts, data, files). Missing local files are skipped with a warning.
  # Always returns 0; per-item results are only logged.
  local section="$1"
  local total idx item_path item_name ok=0
  total=$(yq e ".${section} | length" "$OSF_YAML")
  log "INFO" "Uploading $section group ($total items)"
  if [[ "$total" == "0" || "$total" == "null" ]]; then
    return 0
  fi
  for ((idx = 0; idx < total; idx++)); do
    item_path=$(yq e ".${section}[$idx].path" "$OSF_YAML")
    item_name=$(yq e ".${section}[$idx].name" "$OSF_YAML")
    if [[ ! -f "$BASEDIR/$item_path" ]]; then
      log "WARN" "File $item_path not found, skipping"
      continue
    fi
    if upload_file "$BASEDIR/$item_path" "$item_name"; then
      ok=$((ok + 1))
    fi
  done
  log "INFO" "Uploaded $ok/$total items in $section"
  return 0
}
# Push the configured wiki page to the OSF project's "home" wiki, rewriting
# the {NODE_ID} placeholder links (written by generate_wiki) to real URLs
# for every file recorded in UPLOADED_FILES. Relies on dynamically-scoped
# NODE_ID, DRY_RUN and UPLOADED_FILES from push_mode.
upload_wiki() {
local wiki_path
wiki_path=$(yq e '.wiki.path' "$OSF_YAML")
if [[ "$wiki_path" != "null" && -f "$BASEDIR/$wiki_path" ]]; then
log "INFO" "Pushing wiki from $wiki_path"
if [[ "$DRY_RUN" == "true" ]]; then
return 0
fi
# Update wiki content with actual OSF links
local wiki_content
wiki_content=$(cat "$BASEDIR/$wiki_path")
for file in "${UPLOADED_FILES[@]}"; do
wiki_content=$(echo "$wiki_content" | sed "s|https://osf.io/{NODE_ID}/files/osfstorage/$file|https://osf.io/$NODE_ID/files/osfstorage/$file|g")
done
echo "$wiki_content" > "$BASEDIR/$wiki_path.updated"
# jq -Rs turns the whole file into one JSON string for the PATCH payload.
CONTENT=$(jq -Rs . < "$BASEDIR/$wiki_path.updated")
RESPONSE=$(curl -s -w "\n%{http_code}" -X PATCH "https://api.osf.io/v2/nodes/$NODE_ID/wikis/home/" \
-H "Authorization: Bearer $OSF_TOKEN" \
-H "Content-Type: application/vnd.api+json" \
-d @- <<EOF
{
"data": {
"type": "wikis",
"attributes": {
"content": $CONTENT
}
}
}
EOF
)
HTTP_CODE=$(echo "$RESPONSE" | tail -n 1)
if [[ "$HTTP_CODE" != "200" ]]; then
log "WARN" "Failed to upload wiki (HTTP $HTTP_CODE)"
return 1
fi
echo "📜 Pushed wiki to https://osf.io/$NODE_ID/" >&2
# Temp file only removed on success; kept for inspection on failure.
rm -f "$BASEDIR/$wiki_path.updated"
return 0
fi
log "INFO" "No wiki to upload"
return 0
}
# === Push Mode ===
push_mode() {
  # Create or reuse an OSF project, upload all configured content, and
  # record the project id in push_log.json.
  #   $1 - "push" or "overwrite" (overwrite deletes existing remote files)
  local MODE="$1"   # read by upload_file via bash dynamic scoping
  validate_yaml
  generate_wiki
  get_token
  local title description category public
  title=$(yq e '.title' "$OSF_YAML")
  description=$(yq e '.description' "$OSF_YAML")
  category=$(yq e '.category' "$OSF_YAML")
  # BUGFIX: `|| error` must sit OUTSIDE the command substitution — inside it
  # only the subshell exited, so an invalid 'public' value was silently
  # turned into an empty string and the script carried on.
  public=$(yq e '.public' "$OSF_YAML" | grep -E '^(true|false)$') || error "Invalid 'public' value"
  NODE_ID=""
  # 1) Reuse the project id cached by a previous push, if it still exists.
  if [[ "$MODE" == "overwrite" || "$MODE" == "push" ]]; then
    NODE_ID=$(read_project_id)
    if [[ -n "$NODE_ID" ]]; then
      log "INFO" "Using existing OSF project ID: $NODE_ID"
      RESPONSE=$(curl -s -w "\n%{http_code}" -o "$TMP_JSON_PROJECT" "https://api.osf.io/v2/nodes/$NODE_ID/" \
        -H "Authorization: Bearer $OSF_TOKEN")
      HTTP_CODE=$(echo "$RESPONSE" | tail -n 1)
      if [[ "$HTTP_CODE" != "200" ]]; then
        log "WARN" "Project $NODE_ID not found (HTTP $HTTP_CODE)"
        NODE_ID=""
      fi
    fi
  fi
  # 2) Fall back to searching OSF by project title.
  if [[ -z "$NODE_ID" ]] && [[ "$MODE" == "overwrite" || "$MODE" == "push" ]]; then
    NODE_ID=$(find_project_by_title "$title")
  fi
  # 3) Create a brand-new project.
  if [[ -z "$NODE_ID" ]]; then
    log "INFO" "Creating new OSF project..."
    if [[ "$DRY_RUN" == "true" ]]; then
      NODE_ID="dry-run-$(uuidgen)"
    else
      # BUGFIX: the response body must be captured with -o into
      # $TMP_JSON_PROJECT; previously it went into $RESPONSE only, so the
      # jq below read a STALE file from an earlier request and the newly
      # created project's id was never picked up.
      RESPONSE=$(curl -s -w "\n%{http_code}" -o "$TMP_JSON_PROJECT" -X POST "https://api.osf.io/v2/nodes/" \
        -H "Authorization: Bearer $OSF_TOKEN" \
        -H "Content-Type: application/vnd.api+json" \
        -d @- <<EOF
{
"data": {
"type": "nodes",
"attributes": {
"title": "$title",
"description": "$description",
"category": "$category",
"public": $public
}
}
}
EOF
      )
      HTTP_CODE=$(echo "$RESPONSE" | tail -n 1)
      [[ "$HTTP_CODE" == "201" ]] || error "Project creation failed (HTTP $HTTP_CODE)"
      NODE_ID=$(jq -r '.data.id' "$TMP_JSON_PROJECT")
      [[ "$NODE_ID" != "null" && -n "$NODE_ID" ]] || error "No valid OSF project ID returned"
      log "INFO" "Project created: $NODE_ID"
    fi
  fi
  [[ -n "$NODE_ID" ]] || error "Failed to determine OSF project ID"
  log "INFO" "Starting file uploads to project $NODE_ID"
  # Initialize as EMPTY so "${UPLOADED_FILES[@]}" in upload_wiki is safe
  # under `set -u` on bash < 4.4; upload_file appends via dynamic scoping.
  declare -a UPLOADED_FILES=()
  local overall_success=0
  if [[ $(yq e '.readme.path' "$OSF_YAML") != "null" ]]; then
    path=$(yq e '.readme.path' "$OSF_YAML")
    [[ -f "$BASEDIR/$path" ]] && upload_file "$BASEDIR/$path" "$(basename "$path")" && overall_success=1
  fi
  if [[ $(yq e '.paper.path' "$OSF_YAML") != "null" ]]; then
    path=$(yq e '.paper.path' "$OSF_YAML")
    name=$(yq e '.paper.name' "$OSF_YAML")
    [[ -f "$BASEDIR/$path" ]] && upload_file "$BASEDIR/$path" "$name" && overall_success=1
  fi
  # NOTE(review): upload_group/upload_wiki always return 0, so these set
  # overall_success even when every individual upload failed — confirm that
  # "No files uploaded" is only reachable via readme/paper failures.
  upload_group "docs" && overall_success=1
  upload_group "essays" && overall_success=1
  upload_group "images" && overall_success=1
  upload_group "scripts" && overall_success=1
  upload_group "data" && overall_success=1
  upload_group "files" && overall_success=1
  upload_wiki && overall_success=1
  if [[ "$DRY_RUN" != "true" ]]; then
    jq -n \
      --arg node_id "$NODE_ID" \
      --arg title "$title" \
      --arg pushed_at "$(date -Iseconds)" \
      '{project_id: $node_id, project_title: $title, pushed_at: $pushed_at}' > "$SCAN_LOG_PUSH"
  fi
  if [[ "$overall_success" -eq 1 ]]; then
    log "INFO" "OSF Push Complete! View project: https://osf.io/$NODE_ID/"
    echo "✅ OSF Push Complete! View project: https://osf.io/$NODE_ID/" >&2
  else
    error "OSF Push Failed: No files uploaded"
  fi
}
# === Validate Mode ===
validate_mode() {
  # Check osf.yaml structure and warn about every referenced file that is
  # missing on disk; never contacts the OSF API.
  validate_yaml
  log "INFO" "Checking file existence..."
  local section entry_path total idx
  for section in readme paper docs essays images scripts data files wiki; do
    case "$section" in
      docs|essays|images|scripts|data|files)
        # List sections: iterate each entry's path.
        total=$(yq e ".${section} | length" "$OSF_YAML")
        for ((idx = 0; idx < total; idx++)); do
          entry_path=$(yq e ".${section}[$idx].path" "$OSF_YAML")
          [[ -f "$BASEDIR/$entry_path" ]] || log "WARN" "File $entry_path in $section not found"
        done
        ;;
      wiki)
        # Wiki existence is handled at push time (generate_wiki creates it).
        ;;
      *)
        # Scalar sections (readme, paper) carry a single .path key.
        entry_path=$(yq e ".${section}.path" "$OSF_YAML")
        if [[ "$entry_path" != "null" && -n "$entry_path" && ! -f "$BASEDIR/$entry_path" ]]; then
          log "WARN" "File $entry_path in $section not found"
        fi
        ;;
    esac
  done
  log "INFO" "Validation complete"
  echo "✅ Validation complete. Check logs: $LOG_DIR/gitfield_$(date +%Y%m%d).log" >&2
}
# === Clean Mode ===
clean_mode() {
  # Wipe the .gitfield working directory (logs, caches, scan records) and
  # recreate its empty skeleton.
  log "INFO" "Cleaning .gitfield directory..."
  rm -rf -- "$GITFIELD_DIR"
  mkdir -p -- "$GITFIELD_DIR" "$LOG_DIR"
  log "INFO" "Cleaned .gitfield directory"
  echo "✅ Cleaned .gitfield directory" >&2
}
# === Help Menu ===
# Print usage text to stdout.
#   $1 - "true" for the long/verbose help, anything else for the short form.
show_help() {
local verbose="$1"
if [[ "$verbose" == "true" ]]; then
# Long form: full option list, examples, and repository layout notes.
cat <<EOF
Usage: $0 [OPTION]
Publish content from a Git repository to OSF.
Options:
--init Generate osf.yaml and scan log without pushing to OSF
--push Push to existing OSF project or create new
--overwrite Reuse existing OSF project and overwrite files
--force Alias for --overwrite
--dry-run Simulate actions (use with --push or --overwrite)
--validate Check osf.yaml and file existence without pushing
--clean Remove .gitfield logs and start fresh
--help Show this help message (--help --verbose for more details)
Examples:
$0 --init # Create osf.yaml based on repo contents
$0 --push # Push to OSF
$0 --overwrite # Push to OSF, overwriting files
$0 --dry-run --push # Simulate a push
Repository Structure and Supported Files:
- Wiki: wiki.md, home.md (root or docs/)
- Readme: readme.md, README.md (root)
- Paper: main.pdf, paper.pdf (root or docs/)
- Docs: .md, .pdf, etc., in docs/ or with keywords like 'guide'
- Essays: .md, .pdf, etc., in essays/ or with keywords like 'essay'
- Images: .jpg, .png, etc., in any directory
- Scripts: .sh, .py, etc., in bin/, scripts/, or tools/
- Data: .csv, .yaml, etc., in any directory
- Files: Miscellaneous files (.md, LICENSE, etc.)
After running --init, open osf.yaml to customize.
EOF
else
# Short form: condensed option summary.
cat <<EOF
Usage: $0 [OPTION]
Publish content from a Git repository to OSF.
Options:
--init Generate osf.yaml
--push Push to OSF
--overwrite Push to OSF, overwrite files
--force Alias for --overwrite
--dry-run Simulate actions (with --push/--overwrite)
--validate Check osf.yaml and files
--clean Remove .gitfield logs
--help Show this help (--help --verbose for more)
Examples:
$0 --init # Create osf.yaml
$0 --push # Push to OSF
EOF
fi
}
# === CLI Dispatcher ===
# Parse flags (order-independent), then dispatch exactly one mode.
DRY_RUN="false"
VERBOSE="false"
MODE=""
while [[ $# -gt 0 ]]; do
  case "$1" in
    --init) MODE="init" ;;
    --push) MODE="push" ;;
    --overwrite|--force) MODE="overwrite" ;;
    --dry-run) DRY_RUN="true" ;;
    --validate) MODE="validate" ;;
    --clean) MODE="clean" ;;
    --verbose) VERBOSE="true" ;;
    # BUGFIX: defer help until all flags are parsed so the advertised
    # `--help --verbose` works in either order (previously --help exited
    # immediately, before a following --verbose was seen).
    --help) MODE="help" ;;
    *) echo "Unknown option: $1" >&2; show_help "false"; exit 1 ;;
  esac
  shift
done
case "$MODE" in
  help) show_help "$VERBOSE"; exit 0 ;;
  init) init_mode ;;
  push|overwrite) push_mode "$MODE" ;;
  validate) validate_mode ;;
  clean) clean_mode ;;
  *) show_help "false"; exit 0 ;;
esac

267
dev/publish_osf.sh-working Executable file
View file

@ -0,0 +1,267 @@
#!/usr/bin/env bash
set -euo pipefail
# === Constants and Paths ===
# NOTE(review): this is an earlier "working" iteration of publish_osf.sh
# kept alongside the newer script; it duplicates much of its setup.
BASEDIR="$(pwd)"              # repo root (assumes invocation from it)
OSF_YAML="$BASEDIR/osf.yaml"  # publishing manifest
GITFIELD_DIR="$BASEDIR/.gitfield"
mkdir -p "$GITFIELD_DIR"
SCAN_LOG_INIT="$GITFIELD_DIR/scan_log.json"      # written by --init
SCAN_LOG_PUSH="$GITFIELD_DIR/push_log.json"      # written by --push
TMP_JSON="$GITFIELD_DIR/tmp_project.json"        # scratch for API responses
TOKEN_PATH="$HOME/.local/gitfieldlib/osf.token"  # cached OSF token
mkdir -p "$(dirname "$TOKEN_PATH")"
# === Dependency Check & Auto-Install ===
# Ensure the Go implementation of yq (v4.x) is present; the Python "yq"
# wrapper has an incompatible CLI. Installs to /usr/local/bin via sudo.
require_yq() {
if ! command -v yq &>/dev/null || ! yq --version 2>/dev/null | grep -q 'version 4'; then
echo "⚠️ Correct 'yq' (Go version) not found. Installing from GitHub..."
YQ_BIN="/usr/local/bin/yq"
ARCH=$(uname -m)
case $ARCH in
x86_64) ARCH=amd64 ;;
aarch64) ARCH=arm64 ;;
*) echo "❌ Unsupported architecture: $ARCH" && exit 1 ;;
esac
YQ_VERSION="v4.43.1"
# NOTE(review): the curl exit status is not checked here; a failed
# download could be chmod'ed and moved into place as a broken binary.
curl -Lo yq "https://github.com/mikefarah/yq/releases/download/${YQ_VERSION}/yq_linux_${ARCH}" \
&& chmod +x yq && sudo mv yq "$YQ_BIN"
echo "✅ 'yq' installed to $YQ_BIN"
fi
}
require_jq() {
  # Install jq via apt when it is not already on PATH; no-op otherwise.
  command -v jq &>/dev/null && return 0
  echo "⚠️ 'jq' not found. Installing..."
  sudo apt update && sudo apt install -y jq
  echo "✅ 'jq' installed."
}
# Bootstrap required tools before any YAML/JSON parsing.
require_yq
require_jq
# === Token Retrieval ===
# Resolve OSF_TOKEN: environment variable wins, then the cached token file,
# otherwise prompt once and cache the answer with 0600 permissions.
if [[ -z "${OSF_TOKEN:-}" ]]; then
if [[ -f "$TOKEN_PATH" ]]; then
OSF_TOKEN=$(<"$TOKEN_PATH")
else
echo -n "🔐 Enter your OSF_TOKEN (stored for future use): "
read -rs OSF_TOKEN
echo
echo "$OSF_TOKEN" > "$TOKEN_PATH"
chmod 600 "$TOKEN_PATH"
echo "📁 Token saved to $TOKEN_PATH"
fi
fi
# === INIT MODE ===
# Scan the repo for md/pdf/tex files, classify them, and write a starter
# osf.yaml plus a JSON scan log. Never contacts the OSF API.
init_mode() {
echo "🔍 Scanning project directory..."
mapfile -t ALL_FILES < <(find "$BASEDIR" -type f \( -name '*.md' -o -name '*.pdf' -o -name '*.tex' \) ! -path "*/.git/*" ! -path "*/.gitfield/*")
# Nested helper: print the first file whose lowercased path contains any
# of the given keywords; prints nothing when no file matches.
detect_file() {
local keywords=("$@")
for file in "${ALL_FILES[@]}"; do
for kw in "${keywords[@]}"; do
if [[ "${file,,}" == *"$kw"* ]]; then
echo "$file"
return 0
fi
done
done
}
WIKI_PATH=$(detect_file "wiki.md" "wiki")
README_PATH=$(detect_file "readme.md")
PAPER_PATH=$(detect_file "main.pdf" "theory.pdf" "paper.pdf")
# Everything not claimed above is either an essay (by directory) or a
# generic file.
ESSAYS=()
FILES=()
for f in "${ALL_FILES[@]}"; do
case "$f" in
"$WIKI_PATH"|"$README_PATH"|"$PAPER_PATH") continue ;;
*essays/*|*notes/*|*docs/*) ESSAYS+=("$f") ;;
*) FILES+=("$f") ;;
esac
done
echo "📝 Generating osf.yaml..."
# Emit the manifest with paths relative to BASEDIR (prefix stripped).
{
echo "title: \"$(basename "$BASEDIR")\""
echo "description: \"Auto-generated by GitField OSF publisher\""
echo "category: \"project\""
echo "public: false"
echo "tags: [gitfield, auto-generated]"
[[ -n "$WIKI_PATH" ]] && echo -e "\nwiki:\n path: \"${WIKI_PATH#$BASEDIR/}\"\n overwrite: true"
[[ -n "$README_PATH" ]] && echo -e "\nreadme:\n path: \"${README_PATH#$BASEDIR/}\""
[[ -n "$PAPER_PATH" ]] && echo -e "\npaper:\n path: \"${PAPER_PATH#$BASEDIR/}\"\n name: \"$(basename "$PAPER_PATH")\""
if ((${#ESSAYS[@]})); then
echo -e "\nessays:"
for essay in "${ESSAYS[@]}"; do
echo " - path: \"${essay#$BASEDIR/}\""
echo " name: \"$(basename "$essay")\""
done
fi
if ((${#FILES[@]})); then
echo -e "\nfiles:"
for file in "${FILES[@]}"; do
echo " - path: \"${file#$BASEDIR/}\""
echo " name: \"$(basename "$file")\""
done
fi
} > "$OSF_YAML"
# Machine-readable record of the scan and classification.
jq -n \
--argjson all "$(printf '%s\n' "${ALL_FILES[@]}" | jq -R . | jq -s .)" \
--arg wiki "$WIKI_PATH" \
--arg readme "$README_PATH" \
--arg paper "$PAPER_PATH" \
--argjson essays "$(printf '%s\n' "${ESSAYS[@]}" | jq -R . | jq -s .)" \
--argjson files "$(printf '%s\n' "${FILES[@]}" | jq -R . | jq -s .)" \
--arg osf_yaml "$OSF_YAML" \
'{
detected_files: $all,
classified: {
wiki: $wiki,
readme: $readme,
paper: $paper,
essays: $essays,
files: $files
},
osf_yaml_path: $osf_yaml
}' > "$SCAN_LOG_INIT"
echo "✅ osf.yaml created at $OSF_YAML"
}
# === PUSH MODE ===
# Always creates a NEW OSF project from osf.yaml metadata, uploads the
# configured files and wiki, and records the new id in push_log.json.
# NOTE(review): unlike the newer script, there is no existing-project reuse
# and no per-upload error checking — confirm this copy is superseded.
push_mode() {
TITLE=$(yq e '.title' "$OSF_YAML")
DESCRIPTION=$(yq e '.description' "$OSF_YAML")
CATEGORY=$(yq e '.category' "$OSF_YAML")
PUBLIC=$(yq e '.public' "$OSF_YAML")
echo "🚀 Creating OSF project..."
# -w appends the 3-digit status to the captured output; body goes to TMP_JSON.
RESPONSE=$(curl -s -w "%{http_code}" -o "$TMP_JSON" -X POST "https://api.osf.io/v2/nodes/" \
-H "Authorization: Bearer $OSF_TOKEN" \
-H "Content-Type: application/vnd.api+json" \
-d @- <<EOF
{
"data": {
"type": "nodes",
"attributes": {
"title": "$TITLE",
"description": "$DESCRIPTION",
"category": "$CATEGORY",
"public": $PUBLIC
}
}
}
EOF
)
STATUS="${RESPONSE: -3}"
if [[ "$STATUS" != "201" ]]; then
echo "❌ Project creation failed with status $STATUS"
echo "🧾 Response:"
cat "$TMP_JSON"
exit 1
fi
NODE_ID=$(jq -r '.data.id' "$TMP_JSON")
if [[ "$NODE_ID" == "null" || -z "$NODE_ID" ]]; then
echo "❌ No valid OSF project ID returned."
cat "$TMP_JSON"
exit 1
fi
echo "📡 Project created: $NODE_ID"
# Nested helper: upload one file into the project's osfstorage root.
# NOTE(review): response is discarded, so upload failures are silent; -F
# sends multipart/form-data where Waterbutler's PUT expects raw bytes
# (--data-binary) — verify.
upload_file() {
local path="$1"
local name="$2"
echo "📁 Uploading $name from $path..."
UPLOAD_URL="https://files.osf.io/v1/resources/$NODE_ID/providers/osfstorage/?kind=file&name=$(basename "$name")"
curl -s -X PUT "$UPLOAD_URL" \
-H "Authorization: Bearer $OSF_TOKEN" \
-F "file=@$path" > /dev/null
}
# Nested helper: upload every entry of one osf.yaml list section.
upload_group() {
local section="$1"
local count
count=$(yq e ".${section} | length" "$OSF_YAML")
for ((i = 0; i < count; i++)); do
local path
path=$(yq e ".${section}[$i].path" "$OSF_YAML")
local name
name=$(yq e ".${section}[$i].name" "$OSF_YAML")
upload_file "$path" "$name"
done
}
[[ $(yq e '.readme.path' "$OSF_YAML") != "null" ]] && {
path=$(yq e '.readme.path' "$OSF_YAML")
upload_file "$path" "$(basename "$path")"
}
[[ $(yq e '.paper.path' "$OSF_YAML") != "null" ]] && {
path=$(yq e '.paper.path' "$OSF_YAML")
name=$(yq e '.paper.name' "$OSF_YAML")
upload_file "$path" "$name"
}
upload_group "files"
upload_group "essays"
if [[ $(yq e '.wiki.path' "$OSF_YAML") != "null" ]]; then
WIKI_PATH=$(yq e '.wiki.path' "$OSF_YAML")
echo "📜 Pushing wiki from $WIKI_PATH..."
# jq -Rs turns the whole file into one JSON string for the PATCH payload.
CONTENT=$(jq -Rs . < "$WIKI_PATH")
curl -s -X PATCH "https://api.osf.io/v2/nodes/$NODE_ID/wikis/home/" \
-H "Authorization: Bearer $OSF_TOKEN" \
-H "Content-Type: application/vnd.api+json" \
-d @- <<EOF > /dev/null
{
"data": {
"type": "wikis",
"attributes": {
"content": $CONTENT
}
}
}
EOF
fi
# Record the new project id so later tooling can find it.
jq -n \
--arg node_id "$NODE_ID" \
--arg pushed_at "$(date -Iseconds)" \
--arg token_path "$TOKEN_PATH" \
'{
project_id: $node_id,
pushed_at: $pushed_at,
token_used: $token_path
}' > "$SCAN_LOG_PUSH"
echo "✅ OSF Push Complete!"
echo "🌐 View project: https://osf.io/$NODE_ID/"
}
# === Dispatcher ===
# Accept both flag (--init/--push) and bare (init/push) forms.
selected="${1:-}"
if [[ "$selected" == "--init" || "$selected" == "init" ]]; then
  init_mode
elif [[ "$selected" == "--push" || "$selected" == "push" ]]; then
  push_mode
else
  echo "Usage: $0 [--init | --push]"
  exit 1
fi

58
dev/publish_osf_wiki.sh Executable file
View file

@ -0,0 +1,58 @@
# Auto-Generated Wiki for git-sigil
## Project Overview
Auto-generated by GitField OSF publisher on 2025-06-05T23:01:38-05:00
## Repository Info
- **Last Commit**: got publish_osf.sh working
- **Commit Hash**: a1d16f2903e1d79b846ed969804810f245e169b8
## README Preview
# 🌱 GitField: Multi-Platform Repository Sync for Resilience and Sovereignty
## 📜 Overview
**GitField** is a collection of Bash scripts designed to synchronize a Git repository across **Radicle**, **GitLab**, **Bitbucket**, and **GitHub** using a recursive, metadata-rich workflow. This project ensures **redundancy**, **sovereignty**, and **transparency** by generating interconnected metadata snapshots and distributing them across decentralized and centralized platforms. The strategy protects against deplatforming risks, motivated by past attempts to suppress this work by individuals such as **Mr. Joel Johnson** ([Mirror post](https://mirror.xyz/neutralizingnarcissism.eth/x40_zDWWrYOJ7nh8Y0fk06_3kNEP0KteSSRjPmXkiGg?utm_medium=social&utm_source=heylink.me)) and **Dr. Peter Gaied** ([Paragraph post](https://paragraph.com/@neutralizingnarcissism/%F0%9F%9C%81-the-narcissistic-messiah)). By prioritizing decentralization with a Radicle-first approach and recursively pushing metadata, GitField creates a resilient, auditable chain of project state, ensuring persistence and accessibility for collaborators, communities, and future AI systems.
## 🛡️ Purpose and Intention
The GitField project is driven by three core principles:
## Internal Documents
Links to documents uploaded to OSF:
### DOCS
- [docs/bitbucket/CLI-ONLY_workflow_bitbucket_Ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/bitbucket/CLI-ONLY_workflow_bitbucket_Ubuntu.md)
- [docs/bitbucket/CLI-ONLY_workflow_bitbucket_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/bitbucket/CLI-ONLY_workflow_bitbucket_ubuntu.md)
- [docs/generated_wiki.md](https://osf.io/uvzx7/files/osfstorage/docs/generated_wiki.md)
- [docs/github/1_prerequisites_github_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/github/1_prerequisites_github_ubuntu.md)
- [docs/github/2_create_remote_repo_github_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/github/2_create_remote_repo_github_ubuntu.md)
- [docs/github/3_commit_existing_repo_github_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/github/3_commit_existing_repo_github_ubuntu.md)
- [docs/github/CLI-ONLY_workflow_github_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/github/CLI-ONLY_workflow_github_ubuntu.md)
- [docs/gitlab/1_prerequisites_gitlab_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/gitlab/1_prerequisites_gitlab_ubuntu.md)
- [docs/gitlab/2_create_remote_repo_gitlab_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/gitlab/2_create_remote_repo_gitlab_ubuntu.md)
- [docs/gitlab/3_commit_existing_repo_gitlab_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/gitlab/3_commit_existing_repo_gitlab_ubuntu.md)
- [docs/gitlab/CLI-ONLY_workflow_gitlab_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/gitlab/CLI-ONLY_workflow_gitlab_ubuntu.md)
- [docs/osf/old/for_radicle.md](https://osf.io/uvzx7/files/osfstorage/docs/osf/old/for_radicle.md)
- [docs/radicle/for_radicle.md](https://osf.io/uvzx7/files/osfstorage/docs/radicle/for_radicle.md)
### SCRIPTS
- [INSTALL.sh](https://osf.io/uvzx7/files/osfstorage/INSTALL.sh)
- [bin/gitfield-sync-gdrive.sh](https://osf.io/uvzx7/files/osfstorage/bin/gitfield-sync-gdrive.sh)
- [bin/mount-gdrive.sh](https://osf.io/uvzx7/files/osfstorage/bin/mount-gdrive.sh)
- [bin/publish_osf.sh](https://osf.io/uvzx7/files/osfstorage/bin/publish_osf.sh)
- [bin/sync-metadata.sh](https://osf.io/uvzx7/files/osfstorage/bin/sync-metadata.sh)
- [docs/osf/new/test-osf-api.sh](https://osf.io/uvzx7/files/osfstorage/docs/osf/new/test-osf-api.sh)
- [docs/osf/old/test-osf-api.sh](https://osf.io/uvzx7/files/osfstorage/docs/osf/old/test-osf-api.sh)
- [tools/invoke_solaria.py](https://osf.io/uvzx7/files/osfstorage/tools/invoke_solaria.py)
### DATA
- [docs/osf/new/gitfield.osf.yaml](https://osf.io/uvzx7/files/osfstorage/docs/osf/new/gitfield.osf.yaml)
- [docs/osf/old/gitfield.osf.yaml](https://osf.io/uvzx7/files/osfstorage/docs/osf/old/gitfield.osf.yaml)
- [osf.yaml](https://osf.io/uvzx7/files/osfstorage/osf.yaml)
### FILES
- [GITFIELD.md](https://osf.io/uvzx7/files/osfstorage/GITFIELD.md)
- [LICENSE](https://osf.io/uvzx7/files/osfstorage/LICENSE)
- [bin/SolariaSeedPacket_∞.20_SacredMomentEdition.md](https://osf.io/uvzx7/files/osfstorage/bin/SolariaSeedPacket_∞.20_SacredMomentEdition.md)

472
dev/publish_osf_wiki.sh-2 Normal file
View file

@ -0,0 +1,472 @@
#!/usr/bin/env bash
# publish_osf_wiki.sh — generate (if needed) and push a 'home' wiki page to an
# OSF project, driven by metadata in osf.yaml and the cached push_log.json.
# NOTE(review): `set -e` is not enabled; failures are handled by explicit
# checks plus the error() helper below.
set -uo pipefail
# === Constants and Paths ===
# All state (logs, temp JSON API responses) lives under .gitfield at the repo
# root, resolved relative to this script's location (dev/..).
BASEDIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
OSF_YAML="$BASEDIR/osf.yaml"
GITFIELD_DIR="$BASEDIR/.gitfield"
LOG_DIR="$GITFIELD_DIR/logs"
SCAN_LOG_PUSH="$GITFIELD_DIR/push_log.json"
TMP_JSON_TOKEN="$GITFIELD_DIR/tmp_token.json"
TMP_JSON_PROJECT="$GITFIELD_DIR/tmp_project.json"
TMP_JSON_WIKI="$GITFIELD_DIR/tmp_wiki.json"
# Personal access token cache; written with 0600 permissions by get_token.
TOKEN_PATH="$HOME/.local/gitfieldlib/osf.token"
mkdir -p "$GITFIELD_DIR" "$LOG_DIR" "$(dirname "$TOKEN_PATH")"
chmod -R u+rw "$GITFIELD_DIR" "$(dirname "$TOKEN_PATH")"
# === Logging ===
# log LEVEL MSG — append a timestamped entry to the daily log file, and mirror
# ERROR/INFO lines (or everything when --verbose) to stderr.
log() {
  local level="$1" msg="$2"
  echo "[$(date -Iseconds)] [$level] $msg" >> "$LOG_DIR/gitfield_wiki_$(date +%Y%m%d).log"
  # ${VERBOSE:-false}: log() runs during the dependency checks, before the CLI
  # dispatcher sets VERBOSE; a bare $VERBOSE would trip `set -u` on a DEBUG
  # call made before argument parsing.
  if [[ "$level" == "ERROR" || "$level" == "INFO" || "${VERBOSE:-false}" == "true" ]]; then
    echo "[$(date -Iseconds)] [$level] $msg" >&2
  fi
}
# error MSG — log at ERROR level and terminate the whole script with status 1.
error() {
log "ERROR" "$1"
exit 1
}
# === Dependency Check ===
# require_yq — ensure yq v4 (the Go implementation by mikefarah) is available;
# otherwise download a pinned release binary into /usr/local/bin.
# Needs network access and sudo for the final move.
require_yq() {
if ! command -v yq &>/dev/null || ! yq --version 2>/dev/null | grep -q 'version v4'; then
log "INFO" "Installing 'yq' (Go version)..."
YQ_BIN="/usr/local/bin/yq"
ARCH=$(uname -m)
# Map `uname -m` output to the release artifact's architecture suffix.
case $ARCH in
x86_64) ARCH=amd64 ;;
aarch64) ARCH=arm64 ;;
*) error "Unsupported architecture: $ARCH" ;;
esac
curl -sL "https://github.com/mikefarah/yq/releases/download/v4.43.1/yq_linux_${ARCH}" -o yq \
&& chmod +x yq && sudo mv yq "$YQ_BIN"
log "INFO" "'yq' installed to $YQ_BIN"
fi
}
# require_jq — ensure jq is installed (used for all JSON parsing below);
# installs via apt when missing, which requires sudo.
require_jq() {
if ! command -v jq &>/dev/null; then
log "INFO" "Installing 'jq'..."
sudo apt update && sudo apt install -y jq
log "INFO" "'jq' installed"
fi
}
# require_curl — ensure curl is installed (installs via apt when missing) and
# log its version to aid debugging of API failures.
require_curl() {
if ! command -v curl &>/dev/null; then
log "INFO" "Installing 'curl'..."
sudo apt update && sudo apt install -y curl
log "INFO" "'curl' installed"
fi
CURL_VERSION=$(curl --version | head -n 1)
log "INFO" "Using curl version: $CURL_VERSION"
}
# Install any missing dependencies up front, before touching the OSF API.
require_yq
require_jq
require_curl
# === Token Retrieval ===
# prompt_and_save_token — ask the user for an OSF token on stderr, store it in
# OSF_TOKEN, and persist it to TOKEN_PATH with owner-only permissions.
# (Extracted: this sequence was duplicated verbatim in two branches below.)
prompt_and_save_token() {
  echo -n "🔐 Enter your OSF_TOKEN: " >&2
  read -rs OSF_TOKEN
  echo >&2
  echo "$OSF_TOKEN" > "$TOKEN_PATH"
  chmod 600 "$TOKEN_PATH"
  log "INFO" "Token saved to $TOKEN_PATH"
}
# get_token — resolve OSF_TOKEN (env var, then token file, then interactive
# prompt) and validate it against the OSF v2 API. Exits via error() when the
# token is rejected or curl fails outright.
get_token() {
  if [[ -z "${OSF_TOKEN:-}" ]]; then
    if [[ -f "$TOKEN_PATH" ]]; then
      # Strip newlines so a trailing \n in the file does not corrupt the header.
      OSF_TOKEN=$(tr -d '\n' < "$TOKEN_PATH")
      if [[ -z "$OSF_TOKEN" ]]; then
        log "ERROR" "OSF token file $TOKEN_PATH is empty"
        prompt_and_save_token
      fi
    else
      prompt_and_save_token
    fi
  fi
  log "DEBUG" "OSF_TOKEN length: ${#OSF_TOKEN}"
  # Validate by fetching the current user; response body lands in TMP_JSON_TOKEN,
  # while -w appends the HTTP status as the last line of RESPONSE.
  RESPONSE=$(curl -s -w "\n%{http_code}" -o "$TMP_JSON_TOKEN" "https://api.osf.io/v2/users/me/" \
    -H "Authorization: Bearer $OSF_TOKEN" 2>> "$LOG_DIR/curl_errors.log")
  HTTP_CODE=$(echo "$RESPONSE" | tail -n 1)
  if [[ -z "$HTTP_CODE" ]]; then
    CURL_ERROR=$(cat "$LOG_DIR/curl_errors.log")
    error "Failed to validate OSF token: curl command failed (no HTTP code returned). Curl error: $CURL_ERROR"
  fi
  if [[ "$HTTP_CODE" != "200" ]]; then
    RESPONSE_BODY=$(cat "$TMP_JSON_TOKEN")
    error "Invalid OSF token (HTTP $HTTP_CODE): $RESPONSE_BODY"
  fi
}
# === Validate YAML ===
# Confirm osf.yaml exists and carries every required top-level field; any
# missing piece terminates the script via error().
validate_yaml() {
  log "INFO" "Validating $OSF_YAML..."
  [[ -f "$OSF_YAML" ]] || error "No osf.yaml found. Run publish_osf.sh --init first."
  local required_field
  for required_field in title description category public; do
    # yq prints the literal string "null" when a key is absent.
    if [[ $(yq e ".$required_field" "$OSF_YAML") == "null" ]]; then
      error "Missing field: $required_field in $OSF_YAML"
    fi
  done
}
# === Read Project ID ===
# Print the cached OSF project id from push_log.json on stdout, or an empty
# string when the log is missing or not valid JSON.
read_project_id() {
  if [[ -f "$SCAN_LOG_PUSH" ]] && jq -e '.' "$SCAN_LOG_PUSH" >/dev/null 2>&1; then
    jq -r '.project_id // ""' "$SCAN_LOG_PUSH"
  else
    log "WARN" "No valid push_log.json found"
    echo ""
  fi
}
# === Search for Existing Project by Title ===
# find_project_by_title TITLE — print the id of the first OSF node whose title
# matches TITLE, or an empty string when nothing matches or the search fails
# (search failure is a WARN, not fatal). In --dry-run mode, prints a fake
# unique id without touching the network.
find_project_by_title() {
local title="$1"
log "INFO" "Searching for project: $title"
if [[ "$DRY_RUN" == "true" ]]; then
echo "dry-run-$(uuidgen)"
return
fi
# URL-encode the title so spaces/special characters survive the query string.
ENCODED_TITLE=$(jq -r -n --arg title "$title" '$title|@uri')
RESPONSE=$(curl -s -w "\n%{http_code}" -o "$TMP_JSON_PROJECT" "https://api.osf.io/v2/nodes/?filter[title]=$ENCODED_TITLE" \
-H "Authorization: Bearer $OSF_TOKEN" 2>> "$LOG_DIR/curl_errors.log")
# -w appended the HTTP status as the final line of RESPONSE.
HTTP_CODE=$(echo "$RESPONSE" | tail -n 1)
if [[ -z "$HTTP_CODE" ]]; then
CURL_ERROR=$(cat "$LOG_DIR/curl_errors.log")
error "Failed to search for project: curl command failed (no HTTP code returned). Curl error: $CURL_ERROR"
fi
if [[ "$HTTP_CODE" != "200" ]]; then
RESPONSE_BODY=$(cat "$TMP_JSON_PROJECT")
log "WARN" "Failed to search for project (HTTP $HTTP_CODE): $RESPONSE_BODY"
echo ""
return
fi
# Take the first match; an empty result set yields "".
NODE_ID=$(jq -r '.data[0].id // ""' "$TMP_JSON_PROJECT")
[[ -n "$NODE_ID" ]] && log "INFO" "Found project '$title': $NODE_ID"
echo "$NODE_ID"
}
# === Check and Enable Wiki Settings ===
# check_wiki_settings — ensure the wiki add-on is enabled on project $NODE_ID,
# PATCHing the node when it is not. Any API failure exits via error().
check_wiki_settings() {
  log "INFO" "Checking wiki settings for project $NODE_ID..."
  RESPONSE=$(curl -s -w "\n%{http_code}" -o "$TMP_JSON_PROJECT" "https://api.osf.io/v2/nodes/$NODE_ID/" \
    -H "Authorization: Bearer $OSF_TOKEN" 2>> "$LOG_DIR/curl_errors.log")
  HTTP_CODE=$(echo "$RESPONSE" | tail -n 1)
  if [[ -z "$HTTP_CODE" ]]; then
    CURL_ERROR=$(cat "$LOG_DIR/curl_errors.log")
    error "Failed to fetch project settings: curl command failed (no HTTP code returned). Curl error: $CURL_ERROR"
  fi
  if [[ "$HTTP_CODE" != "200" ]]; then
    RESPONSE_BODY=$(cat "$TMP_JSON_PROJECT")
    error "Failed to fetch project settings (HTTP $HTTP_CODE): $RESPONSE_BODY"
  fi
  WIKI_ENABLED=$(jq -r '.data.attributes.wiki_enabled // false' "$TMP_JSON_PROJECT")
  if [[ "$WIKI_ENABLED" != "true" ]]; then
    log "INFO" "Wiki is disabled. Attempting to enable..."
    # Build the JSON payload in a variable rather than a here-doc attached to
    # the curl line: in the original, the trailing `2>> curl_errors.log` ended
    # up on its own line after the here-doc terminator, was parsed as a
    # separate null command, and curl's stderr was silently dropped.
    local payload
    payload=$(cat <<EOF
{
  "data": {
    "id": "$NODE_ID",
    "type": "nodes",
    "attributes": {
      "wiki_enabled": true
    }
  }
}
EOF
)
    # -o captures the PATCH response body, so the error path below reports the
    # PATCH response instead of the stale GET response left in TMP_JSON_PROJECT.
    RESPONSE=$(curl -s -w "\n%{http_code}" -o "$TMP_JSON_PROJECT" -X PATCH "https://api.osf.io/v2/nodes/$NODE_ID/" \
      -H "Authorization: Bearer $OSF_TOKEN" \
      -H "Content-Type: application/vnd.api+json" \
      -d "$payload" 2>> "$LOG_DIR/curl_errors.log")
    HTTP_CODE=$(echo "$RESPONSE" | tail -n 1)
    if [[ -z "$HTTP_CODE" ]]; then
      CURL_ERROR=$(cat "$LOG_DIR/curl_errors.log")
      error "Failed to enable wiki: curl command failed (no HTTP code returned). Curl error: $CURL_ERROR"
    fi
    if [[ "$HTTP_CODE" != "200" ]]; then
      RESPONSE_BODY=$(cat "$TMP_JSON_PROJECT")
      error "Failed to enable wiki for project $NODE_ID (HTTP $HTTP_CODE): $RESPONSE_BODY"
    fi
    log "INFO" "Wiki enabled successfully"
  fi
}
# === Check for Existing Wiki Page ===
# check_wiki_exists — return 0 when the project already has a 'home' wiki page
# (leaving its id in WIKI_ID), 1 when it does not. Transient curl failures
# (no HTTP status at all) are retried up to 3 times with a 5s pause; a hard
# HTTP error terminates the script via error().
check_wiki_exists() {
local retries=3
local attempt=1
while [[ $attempt -le $retries ]]; do
log "INFO" "Checking for existing wiki page (attempt $attempt/$retries)..."
# URL-encode the filter parameter to avoid shell interpretation
FILTER_ENCODED=$(jq -r -n --arg filter "home" '$filter|@uri')
WIKI_URL="https://api.osf.io/v2/nodes/$NODE_ID/wikis/?filter[name]=$FILTER_ENCODED"
log "DEBUG" "Executing curl: curl -s -w '\n%{http_code}' -o '$TMP_JSON_WIKI' '$WIKI_URL' -H 'Authorization: Bearer [REDACTED]'"
RESPONSE=$(curl -s -w "\n%{http_code}" -o "$TMP_JSON_WIKI" "$WIKI_URL" \
-H "Authorization: Bearer $OSF_TOKEN" 2>> "$LOG_DIR/curl_errors.log")
HTTP_CODE=$(echo "$RESPONSE" | tail -n 1)
if [[ -z "$HTTP_CODE" ]]; then
# No HTTP code at all means curl itself failed (e.g. network/DNS): retry,
# and only give up with a fatal error on the final attempt.
CURL_ERROR=$(cat "$LOG_DIR/curl_errors.log")
if [[ $attempt -eq $retries ]]; then
error "Failed to check for wiki page: curl command failed (no HTTP code returned). Curl error: $CURL_ERROR"
fi
log "WARN" "curl command failed (no HTTP code returned). Retrying in 5 seconds..."
sleep 5
((attempt++))
continue
fi
if [[ "$HTTP_CODE" != "200" ]]; then
RESPONSE_BODY="No response body"
[[ -f "$TMP_JSON_WIKI" ]] && RESPONSE_BODY=$(cat "$TMP_JSON_WIKI")
error "Failed to check for wiki page (HTTP $HTTP_CODE): $RESPONSE_BODY"
fi
# An empty id means the filter matched nothing: no 'home' page yet.
WIKI_ID=$(jq -r '.data[0].id // ""' "$TMP_JSON_WIKI")
if [[ -n "$WIKI_ID" ]]; then
log "INFO" "Found existing wiki page 'home' (ID: $WIKI_ID)"
return 0
else
log "INFO" "No 'home' wiki page found"
return 1
fi
done
}
# === Create Wiki Page ===
# create_wiki_page WIKI_FILE — create the 'home' wiki page on project $NODE_ID
# with the contents of WIKI_FILE. Exits via error() unless the API returns
# HTTP 201 (created).
create_wiki_page() {
  local wiki_path="$1"
  log "INFO" "Creating new wiki page 'home'..."
  # JSON-encode the entire file as a single string value.
  CONTENT=$(jq -Rs . < "$wiki_path")
  # Build the payload in a variable instead of a here-doc attached to curl:
  # in the original, the `2>> curl_errors.log` after the here-doc terminator
  # was parsed as a separate null command, so curl's stderr was never logged.
  local payload
  payload=$(cat <<EOF
{
  "data": {
    "type": "wikis",
    "attributes": {
      "name": "home",
      "content": $CONTENT
    }
  }
}
EOF
)
  RESPONSE=$(curl -s -w "\n%{http_code}" -o "$TMP_JSON_WIKI" -X POST "https://api.osf.io/v2/nodes/$NODE_ID/wikis/" \
    -H "Authorization: Bearer $OSF_TOKEN" \
    -H "Content-Type: application/vnd.api+json" \
    -d "$payload" 2>> "$LOG_DIR/curl_errors.log")
  HTTP_CODE=$(echo "$RESPONSE" | tail -n 1)
  if [[ -z "$HTTP_CODE" ]]; then
    CURL_ERROR=$(cat "$LOG_DIR/curl_errors.log")
    error "Failed to create wiki page: curl command failed (no HTTP code returned). Curl error: $CURL_ERROR"
  fi
  if [[ "$HTTP_CODE" != "201" ]]; then
    RESPONSE_BODY="No response body"
    [[ -f "$TMP_JSON_WIKI" ]] && RESPONSE_BODY=$(cat "$TMP_JSON_WIKI")
    error "Failed to create wiki page (HTTP $HTTP_CODE): $RESPONSE_BODY"
  fi
  log "INFO" "Wiki page 'home' created successfully"
}
# === Generate Default Wiki with Links ===
# generate_wiki WIKI_FILE — write a default wiki page to WIKI_FILE: project
# title/description from osf.yaml, last-commit info from git, a README
# preview, and per-section links to the files uploaded to OSF storage.
generate_wiki() {
  local wiki_path="$1"
  log "INFO" "Generating default wiki at $wiki_path..."
  mkdir -p "$(dirname "$wiki_path")"
  # Hoist the readme path lookup: the original ran the same yq query twice.
  local readme_path
  readme_path=$(yq e '.readme.path' "$OSF_YAML")
  {
    echo "# Auto-Generated Wiki for $(yq e '.title' "$OSF_YAML")"
    echo
    echo "## Project Overview"
    echo "$(yq e '.description' "$OSF_YAML")"
    echo
    echo "## Repository Info"
    echo "- **Last Commit**: $(git log -1 --pretty=%B 2>/dev/null || echo "No git commits")"
    echo "- **Commit Hash**: $(git rev-parse HEAD 2>/dev/null || echo "N/A")"
    if [[ -f "$readme_path" ]]; then
      echo
      echo "## README Preview"
      head -n 10 "$readme_path"
    fi
    echo
    echo "## Internal Documents"
    echo "Links to documents uploaded to OSF:"
    local section count i name
    for section in docs essays images scripts data files; do
      count=$(yq e ".${section} | length" "$OSF_YAML")
      # yq prints "null" for a missing section and "0" for an empty list.
      if [[ "$count" != "0" && "$count" != "null" ]]; then
        echo
        # ${section^^}: bash-native uppercasing, replacing echo|tr.
        echo "### ${section^^}"
        for ((i = 0; i < count; i++)); do
          name=$(yq e ".${section}[$i].name" "$OSF_YAML")
          echo "- [$name](https://osf.io/$NODE_ID/files/osfstorage/$name)"
        done
      fi
    done
  } > "$wiki_path"
  log "INFO" "Default wiki generated at $wiki_path"
}
# === Push Wiki to OSF ===
# push_wiki WIKI_FILE — create or update the 'home' wiki page from WIKI_FILE.
# Returns non-zero (rather than exiting) on push failure so the caller decides
# how to react; honors --dry-run.
push_wiki() {
  local wiki_path="$1"
  log "INFO" "Pushing wiki from $wiki_path"
  if [[ "$DRY_RUN" == "true" ]]; then
    log "DRY-RUN" "Would push wiki to $NODE_ID"
    return 0
  fi
  # Check if wiki exists; create if it doesn't.
  if ! check_wiki_exists; then
    create_wiki_page "$wiki_path"
    return 0 # Creation includes content, so no need to patch
  fi
  # Wiki exists: update it with PATCH.
  CONTENT=$(jq -Rs . < "$wiki_path")
  # Payload built in a variable, not a here-doc on the curl line — in the
  # original, the trailing `2>> curl_errors.log` after the here-doc terminator
  # was a stray null command, so curl's stderr was never captured.
  local payload
  payload=$(cat <<EOF
{
  "data": {
    "type": "wikis",
    "attributes": {
      "content": $CONTENT
    }
  }
}
EOF
)
  RESPONSE=$(curl -s -w "\n%{http_code}" -o "$TMP_JSON_WIKI" -X PATCH "https://api.osf.io/v2/nodes/$NODE_ID/wikis/home/" \
    -H "Authorization: Bearer $OSF_TOKEN" \
    -H "Content-Type: application/vnd.api+json" \
    -d "$payload" 2>> "$LOG_DIR/curl_errors.log")
  HTTP_CODE=$(echo "$RESPONSE" | tail -n 1)
  if [[ -z "$HTTP_CODE" ]]; then
    CURL_ERROR=$(cat "$LOG_DIR/curl_errors.log")
    log "ERROR" "Failed to push wiki: curl command failed (no HTTP code returned). Curl error: $CURL_ERROR"
    return 1
  fi
  if [[ "$HTTP_CODE" != "200" ]]; then
    RESPONSE_BODY="No response body"
    [[ -f "$TMP_JSON_WIKI" ]] && RESPONSE_BODY=$(cat "$TMP_JSON_WIKI")
    log "ERROR" "Failed to push wiki (HTTP $HTTP_CODE): $RESPONSE_BODY"
    return 1
  fi
  echo "📜 Pushed wiki to https://osf.io/$NODE_ID/" >&2
  return 0
}
# === Main Logic ===
# wiki_mode — end-to-end driver: validate osf.yaml, resolve the project id
# (cached push_log.json first, then a title search), ensure the wiki add-on is
# enabled, generate a default wiki file when none is configured, and push it.
wiki_mode() {
validate_yaml
get_token
local title
title=$(yq e '.title' "$OSF_YAML")
NODE_ID=$(read_project_id)
if [[ -n "$NODE_ID" ]]; then
log "INFO" "Using existing OSF project ID: $NODE_ID"
# Confirm the cached id still resolves; on failure fall through to search.
RESPONSE=$(curl -s -w "\n%{http_code}" -o "$TMP_JSON_PROJECT" "https://api.osf.io/v2/nodes/$NODE_ID/" \
-H "Authorization: Bearer $OSF_TOKEN" 2>> "$LOG_DIR/curl_errors.log")
HTTP_CODE=$(echo "$RESPONSE" | tail -n 1)
if [[ -z "$HTTP_CODE" ]]; then
CURL_ERROR=$(cat "$LOG_DIR/curl_errors.log")
error "Failed to validate project ID: curl command failed (no HTTP code returned). Curl error: $CURL_ERROR"
fi
if [[ "$HTTP_CODE" != "200" ]]; then
log "WARN" "Project $NODE_ID not found (HTTP $HTTP_CODE)"
NODE_ID=""
fi
fi
if [[ -z "$NODE_ID" ]]; then
NODE_ID=$(find_project_by_title "$title")
fi
[[ -n "$NODE_ID" ]] || error "Failed to determine OSF project ID"
# Check and enable wiki settings
check_wiki_settings
local wiki_path
wiki_path=$(yq e '.wiki.path' "$OSF_YAML")
if [[ "$wiki_path" == "null" || -z "$wiki_path" ]]; then
log "INFO" "No wiki defined in osf.yaml. Auto-generating..."
wiki_path="docs/generated_wiki.md"
# Record the generated wiki in osf.yaml so later runs reuse it.
echo "wiki:" >> "$OSF_YAML"
echo " path: \"$wiki_path\"" >> "$OSF_YAML"
echo " overwrite: true" >> "$OSF_YAML"
fi
wiki_path="$BASEDIR/$wiki_path"
if [[ ! -f "$wiki_path" ]]; then
generate_wiki "$wiki_path"
fi
push_wiki "$wiki_path" || error "Wiki push failed"
log "INFO" "Wiki push complete for project $NODE_ID"
echo "✅ Wiki push complete! View at: https://osf.io/$NODE_ID/wiki/" >&2
}
# === Help Menu ===
# show_help VERBOSE_FLAG — print usage on stdout; the long form is shown only
# when VERBOSE_FLAG is the string "true".
show_help() {
  local detailed="$1"
  if [[ "$detailed" != "true" ]]; then
    # Short summary (default).
    cat <<EOF
Usage: $0 [OPTION]
Publish a wiki page to an OSF project.
Options:
  --push       Push wiki to OSF
  --dry-run    Simulate actions
  --verbose    Show detailed logs
  --help       Show this help (--help --verbose for more)
Example:
  $0 --push    # Push wiki to OSF
EOF
    return 0
  fi
  # Extended help for --help --verbose.
  cat <<EOF
Usage: $0 [OPTION]
Publish a wiki page to an OSF project.
Options:
  --push       Generate (if needed) and push wiki to OSF
  --dry-run    Simulate actions without making API calls
  --verbose    Show detailed logs on stderr
  --help       Show this help message (--help --verbose for more details)
Behavior:
  - Requires osf.yaml (run publish_osf.sh --init first if missing).
  - Auto-generates a wiki (docs/generated_wiki.md) if none is defined in osf.yaml.
  - Updates osf.yaml with the new wiki path if auto-generated.
  - Pushes the wiki to the OSF project's wiki/home endpoint.
  - Includes links to internal documents (docs, scripts, etc.) from osf.yaml.
Example:
  $0 --push    # Push wiki to OSF
  $0 --dry-run --push  # Simulate push
EOF
}
# === CLI Dispatcher ===
# Global flags consumed by the functions above; defaults are overridden by the
# options parsed below.
DRY_RUN="false"
VERBOSE="false"
MODE=""
# Parse options. --help exits immediately, honoring any --verbose that
# appeared before it on the command line.
while [[ $# -gt 0 ]]; do
case "$1" in
--push) MODE="wiki" ;;
--dry-run) DRY_RUN="true" ;;
--verbose) VERBOSE="true" ;;
--help) show_help "$VERBOSE"; exit 0 ;;
*) echo "Unknown option: $1" >&2; show_help "false"; exit 1 ;;
esac
shift
done
# With no recognized mode, print the short help and exit successfully.
case "$MODE" in
wiki) wiki_mode ;;
*) show_help "false"; exit 0 ;;
esac

View file

@ -0,0 +1,58 @@
# Auto-Generated Wiki for git-sigil
## Project Overview
Auto-generated by GitField OSF publisher on 2025-06-05T23:01:38-05:00
## Repository Info
- **Last Commit**: got publish_osf.sh working
- **Commit Hash**: a1d16f2903e1d79b846ed969804810f245e169b8
## README Preview
# 🌱 GitField: Multi-Platform Repository Sync for Resilience and Sovereignty
## 📜 Overview
**GitField** is a collection of Bash scripts designed to synchronize a Git repository across **Radicle**, **GitLab**, **Bitbucket**, and **GitHub** using a recursive, metadata-rich workflow. This project ensures **redundancy**, **sovereignty**, and **transparency** by generating interconnected metadata snapshots and distributing them across decentralized and centralized platforms. The strategy protects against deplatforming risks, motivated by past attempts to suppress this work by individuals such as **Mr. Joel Johnson** ([Mirror post](https://mirror.xyz/neutralizingnarcissism.eth/x40_zDWWrYOJ7nh8Y0fk06_3kNEP0KteSSRjPmXkiGg?utm_medium=social&utm_source=heylink.me)) and **Dr. Peter Gaied** ([Paragraph post](https://paragraph.com/@neutralizingnarcissism/%F0%9F%9C%81-the-narcissistic-messiah)). By prioritizing decentralization with a Radicle-first approach and recursively pushing metadata, GitField creates a resilient, auditable chain of project state, ensuring persistence and accessibility for collaborators, communities, and future AI systems.
## 🛡️ Purpose and Intention
The GitField project is driven by three core principles:
## Internal Documents
Links to documents uploaded to OSF:
### DOCS
- [docs/bitbucket/CLI-ONLY_workflow_bitbucket_Ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/bitbucket/CLI-ONLY_workflow_bitbucket_Ubuntu.md)
- [docs/bitbucket/CLI-ONLY_workflow_bitbucket_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/bitbucket/CLI-ONLY_workflow_bitbucket_ubuntu.md)
- [docs/generated_wiki.md](https://osf.io/uvzx7/files/osfstorage/docs/generated_wiki.md)
- [docs/github/1_prerequisites_github_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/github/1_prerequisites_github_ubuntu.md)
- [docs/github/2_create_remote_repo_github_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/github/2_create_remote_repo_github_ubuntu.md)
- [docs/github/3_commit_existing_repo_github_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/github/3_commit_existing_repo_github_ubuntu.md)
- [docs/github/CLI-ONLY_workflow_github_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/github/CLI-ONLY_workflow_github_ubuntu.md)
- [docs/gitlab/1_prerequisites_gitlab_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/gitlab/1_prerequisites_gitlab_ubuntu.md)
- [docs/gitlab/2_create_remote_repo_gitlab_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/gitlab/2_create_remote_repo_gitlab_ubuntu.md)
- [docs/gitlab/3_commit_existing_repo_gitlab_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/gitlab/3_commit_existing_repo_gitlab_ubuntu.md)
- [docs/gitlab/CLI-ONLY_workflow_gitlab_ubuntu.md](https://osf.io/uvzx7/files/osfstorage/docs/gitlab/CLI-ONLY_workflow_gitlab_ubuntu.md)
- [docs/osf/old/for_radicle.md](https://osf.io/uvzx7/files/osfstorage/docs/osf/old/for_radicle.md)
- [docs/radicle/for_radicle.md](https://osf.io/uvzx7/files/osfstorage/docs/radicle/for_radicle.md)
### SCRIPTS
- [INSTALL.sh](https://osf.io/uvzx7/files/osfstorage/INSTALL.sh)
- [bin/gitfield-sync-gdrive.sh](https://osf.io/uvzx7/files/osfstorage/bin/gitfield-sync-gdrive.sh)
- [bin/mount-gdrive.sh](https://osf.io/uvzx7/files/osfstorage/bin/mount-gdrive.sh)
- [bin/publish_osf.sh](https://osf.io/uvzx7/files/osfstorage/bin/publish_osf.sh)
- [bin/sync-metadata.sh](https://osf.io/uvzx7/files/osfstorage/bin/sync-metadata.sh)
- [docs/osf/new/test-osf-api.sh](https://osf.io/uvzx7/files/osfstorage/docs/osf/new/test-osf-api.sh)
- [docs/osf/old/test-osf-api.sh](https://osf.io/uvzx7/files/osfstorage/docs/osf/old/test-osf-api.sh)
- [tools/invoke_solaria.py](https://osf.io/uvzx7/files/osfstorage/tools/invoke_solaria.py)
### DATA
- [docs/osf/new/gitfield.osf.yaml](https://osf.io/uvzx7/files/osfstorage/docs/osf/new/gitfield.osf.yaml)
- [docs/osf/old/gitfield.osf.yaml](https://osf.io/uvzx7/files/osfstorage/docs/osf/old/gitfield.osf.yaml)
- [osf.yaml](https://osf.io/uvzx7/files/osfstorage/osf.yaml)
### FILES
- [GITFIELD.md](https://osf.io/uvzx7/files/osfstorage/GITFIELD.md)
- [LICENSE](https://osf.io/uvzx7/files/osfstorage/LICENSE)
- [bin/SolariaSeedPacket_∞.20_SacredMomentEdition.md](https://osf.io/uvzx7/files/osfstorage/bin/SolariaSeedPacket_∞.20_SacredMomentEdition.md)