Mirror of https://github.com/rustfs/rustfs.git (synced 2026-01-17 09:40:32 +00:00)

Compare commits: 35 commits, 1.0.0-alph...1.0.0-alph
| Author | SHA1 | Date |
|---|---|---|
| | 42b645e355 | |
| | f27ee96014 | |
| | 20cd117aa6 | |
| | fc8931d69f | |
| | 0167b2decd | |
| | e67980ff3c | |
| | 96760bba5a | |
| | 2501d7d241 | |
| | 55b84262b5 | |
| | ce4252eb1a | |
| | db708917b4 | |
| | 8ddb45627d | |
| | 550c225b79 | |
| | 0d46b550a8 | |
| | 0693cca1a4 | |
| | 0d9f9e381a | |
| | 6c7aa5a7ae | |
| | a27d935925 | |
| | b4f87a4fee | |
| | ee5f94a2e2 | |
| | 9c3cf554d3 | |
| | addbfa5487 | |
| | 5eb461d7b7 | |
| | 1ea45afcd7 | |
| | dbd86f6aee | |
| | af693f7b3f | |
| | 3be5ee6445 | |
| | 0acc8fe26a | |
| | ecf40eb86c | |
| | 48ce7055f8 | |
| | 749f55d688 | |
| | e5d17f5382 | |
| | 982cc66c74 | |
| | 74bf4909c8 | |
| | 9c956b4445 | |
@@ -6,10 +6,10 @@ This directory contains Docker configuration files and supporting infrastructure

```
rustfs/
├── Dockerfile           # Production image (Alpine + GitHub Releases)
├── Dockerfile.source    # Source build (Debian + cross-compilation)
├── cargo.config.toml    # Rust cargo configuration
├── Dockerfile           # Production image (Alpine + pre-built binaries)
├── Dockerfile.source    # Development image (Debian + source build)
├── docker-buildx.sh     # Multi-architecture build script
├── Makefile             # Build automation with simplified commands
└── .docker/             # Supporting infrastructure
    ├── observability/   # Monitoring and observability configs
    ├── compose/         # Docker Compose configurations
```

@@ -64,7 +64,11 @@ docker run rustfs/rustfs:main-latest # Main branch latest

### Development Environment

```bash
# Start development container
# Quick setup using Makefile (recommended)
make docker-dev-local # Build development image locally
make dev-env-start # Start development container

# Manual Docker commands
docker run -it -v $(pwd):/workspace -p 9000:9000 rustfs/rustfs:latest-dev

# Build from source locally

@@ -76,9 +80,33 @@ docker-compose up rustfs-dev

## 🏗️ Build Arguments and Scripts

### Using docker-buildx.sh (Recommended)
### Using Makefile Commands (Recommended)

For multi-architecture builds, use the provided script:
The easiest way to build images using simplified commands:

```bash
# Development images (build from source)
make docker-dev-local # Build for local use (single arch)
make docker-dev # Build multi-arch (for CI/CD)
make docker-dev-push REGISTRY=xxx # Build and push to registry

# Production images (using pre-built binaries)
make docker-buildx # Build multi-arch production images
make docker-buildx-push # Build and push production images
make docker-buildx-version VERSION=v1.0.0 # Build specific version

# Development environment
make dev-env-start # Start development container
make dev-env-stop # Stop development container
make dev-env-restart # Restart development container

# Help
make help-docker # Show all Docker-related commands
```

### Using docker-buildx.sh (Advanced)

For direct script usage and advanced scenarios:

```bash
# Build latest version for all architectures

@@ -147,17 +175,51 @@ Architecture is automatically detected during build using Docker's `TARGETARCH`

## 🛠️ Development Workflow

For local development and testing:
### Quick Start with Makefile (Recommended)

```bash
# Quick development setup
docker-compose up rustfs-dev
# 1. Start development environment
make dev-env-start

# Custom source build
docker build -f Dockerfile.source -t rustfs:custom .
# 2. Your development container is now running with:
#    - Port 9000 exposed for RustFS
#    - Port 9010 exposed for admin console
#    - Current directory mounted as /workspace

# 3. Stop when done
make dev-env-stop
```

### Manual Development Setup

```bash
# Build development image from source
make docker-dev-local

# Or use traditional Docker commands
docker build -f Dockerfile.source -t rustfs:dev .

# Run with development tools
docker run -it -v $(pwd):/workspace rustfs:custom bash
docker run -it -v $(pwd):/workspace -p 9000:9000 rustfs:dev bash

# Or use docker-compose for complex setups
docker-compose up rustfs-dev
```

### Common Development Tasks

```bash
# Build and test locally
make build # Build binary natively
make docker-dev-local # Build development Docker image
make test # Run tests
make fmt # Format code
make clippy # Run linter

# Get help
make help # General help
make help-docker # Docker-specific help
make help-build # Build-specific help
```

## 🚀 CI/CD Integration
@@ -1,40 +0,0 @@
FROM alpine:3.18

ENV LANG C.UTF-8

# Install base dependencies
RUN apk add --no-cache \
    wget \
    git \
    curl \
    unzip \
    gcc \
    musl-dev \
    pkgconfig \
    openssl-dev \
    dbus-dev \
    wayland-dev \
    webkit2gtk-4.1-dev \
    build-base \
    linux-headers

# install protoc
RUN wget https://github.com/protocolbuffers/protobuf/releases/download/v30.2/protoc-30.2-linux-x86_64.zip \
    && unzip protoc-30.2-linux-x86_64.zip -d protoc3 \
    && mv protoc3/bin/* /usr/local/bin/ && chmod +x /usr/local/bin/protoc \
    && mv protoc3/include/* /usr/local/include/ && rm -rf protoc-30.2-linux-x86_64.zip protoc3

# install flatc
RUN wget https://github.com/google/flatbuffers/releases/download/v24.3.25/Linux.flatc.binary.g++-13.zip \
    && unzip Linux.flatc.binary.g++-13.zip \
    && mv flatc /usr/local/bin/ && chmod +x /usr/local/bin/flatc && rm -rf Linux.flatc.binary.g++-13.zip

# install rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y

# Set PATH for rust
ENV PATH="/root/.cargo/bin:${PATH}"

COPY .docker/cargo.config.toml /root/.cargo/config.toml

WORKDIR /root/rustfs
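The deleted file above is a build-toolchain image (protoc, flatc, and a rustup-installed Rust toolchain on Alpine). A rough sketch of how such an image would be used for a containerized source build follows; the file path and image tag are assumptions, not taken from this diff:

```bash
# Hypothetical path and tag for the toolchain Dockerfile shown above.
docker build -f .docker/Dockerfile.build-env -t rustfs-build-env .

# Compile inside the container, mounting the checkout at the WORKDIR it declares.
docker run --rm -v "$(pwd)":/root/rustfs rustfs-build-env \
  cargo build --release -p rustfs --bins
```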
303  .github/workflows/build.yml  (vendored)
@@ -12,6 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.

# Build and Release Workflow
#
# This workflow builds RustFS binaries and automatically triggers Docker image builds.
#
# Flow:
# 1. Build binaries for multiple platforms
# 2. Upload binaries to OSS storage
# 3. Trigger docker.yml to build and push images using the uploaded binaries
#
# Manual Parameters:
# - build_docker: Build and push Docker images (default: true)

name: Build and Release

on:

@@ -52,10 +64,10 @@ on:
- cron: "0 0 * * 0" # Weekly on Sunday at midnight UTC
workflow_dispatch:
inputs:
force_build:
description: "Force build even without changes"
build_docker:
description: "Build and push Docker images after binary build"
required: false
default: false
default: true
type: boolean

env:
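With the new `build_docker` input, a manual run of this workflow can be dispatched from the GitHub CLI roughly as follows (a sketch; assumes an authenticated `gh` with access to rustfs/rustfs):

```bash
# Build binaries and let the Docker Images workflow run afterwards (default behaviour).
gh workflow run "Build and Release" --repo rustfs/rustfs -f build_docker=true

# Binary-only build: skip the follow-up Docker image build.
gh workflow run "Build and Release" --repo rustfs/rustfs -f build_docker=false
```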
@@ -117,7 +129,6 @@ jobs:
echo "🛠️ Development build detected"
elif [[ "${{ github.event_name }}" == "schedule" ]] || \
[[ "${{ github.event_name }}" == "workflow_dispatch" ]] || \
[[ "${{ github.event.inputs.force_build }}" == "true" ]] || \
[[ "${{ contains(github.event.head_commit.message, '--build') }}" == "true" ]]; then
# Scheduled or manual build
should_build=true

@@ -231,7 +242,7 @@ jobs:
cargo install cross --git https://github.com/cross-rs/cross
cross build --release --target ${{ matrix.target }} -p rustfs --bins
else
# Use zigbuild for Linux ARM64
# Use zigbuild for other cross-compilation
cargo zigbuild --release --target ${{ matrix.target }} -p rustfs --bins
fi
else
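To reproduce the zigbuild path locally, a minimal sketch (it assumes Zig is on PATH; the target triple is just an example, since CI takes it from the build matrix):

```bash
# One-time setup: install the cargo-zigbuild wrapper.
cargo install cargo-zigbuild

# Cross-compile the rustfs binaries for ARM64 Linux, mirroring the CI step above.
cargo zigbuild --release --target aarch64-unknown-linux-musl -p rustfs --bins
```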
@@ -445,6 +456,13 @@ jobs:
echo "🔢 Version: $VERSION"
echo ""

# Check build status
BUILD_STATUS="${{ needs.build-rustfs.result }}"

echo "📊 Build Results:"
echo " 📦 All platforms: $BUILD_STATUS"
echo ""

case "$BUILD_TYPE" in
"development")
echo "🛠️ Development build artifacts have been uploaded to OSS dev directory"

@@ -453,11 +471,282 @@ jobs:
"release")
echo "🚀 Release build artifacts have been uploaded to OSS release directory"
echo "✅ This build is ready for production use"
echo "🏷️ GitHub Release will be created automatically by the release workflow"
echo "🏷️ GitHub Release will be created in this workflow"
;;
"prerelease")
echo "🧪 Prerelease build artifacts have been uploaded to OSS release directory"
echo "⚠️ This is a prerelease build - use with caution"
echo "🏷️ GitHub Release will be created automatically by the release workflow"
echo "🏷️ GitHub Release will be created in this workflow"
;;
esac

echo ""
echo "🐳 Docker Images:"
if [[ "${{ github.event.inputs.build_docker }}" == "false" ]]; then
echo "⏭️ Docker image build was skipped (binary only build)"
elif [[ "$BUILD_STATUS" == "success" ]]; then
echo "🔄 Docker images will be built and pushed automatically via workflow_run event"
else
echo "❌ Docker image build will be skipped due to build failure"
fi

# Create GitHub Release (only for tag pushes)
create-release:
name: Create GitHub Release
needs: [build-check, build-rustfs]
if: startsWith(github.ref, 'refs/tags/') && needs.build-check.outputs.build_type != 'development'
runs-on: ubuntu-latest
permissions:
contents: write
outputs:
release_id: ${{ steps.create.outputs.release_id }}
release_url: ${{ steps.create.outputs.release_url }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Create GitHub Release
id: create
env:
GH_TOKEN: ${{ github.token }}
run: |
TAG="${{ needs.build-check.outputs.version }}"
VERSION="${{ needs.build-check.outputs.version }}"
IS_PRERELEASE="${{ needs.build-check.outputs.is_prerelease }}"
BUILD_TYPE="${{ needs.build-check.outputs.build_type }}"

# Determine release type for title
if [[ "$BUILD_TYPE" == "prerelease" ]]; then
if [[ "$TAG" == *"alpha"* ]]; then
RELEASE_TYPE="alpha"
elif [[ "$TAG" == *"beta"* ]]; then
RELEASE_TYPE="beta"
elif [[ "$TAG" == *"rc"* ]]; then
RELEASE_TYPE="rc"
else
RELEASE_TYPE="prerelease"
fi
else
RELEASE_TYPE="release"
fi

# Check if release already exists
if gh release view "$TAG" >/dev/null 2>&1; then
echo "Release $TAG already exists"
RELEASE_ID=$(gh release view "$TAG" --json databaseId --jq '.databaseId')
RELEASE_URL=$(gh release view "$TAG" --json url --jq '.url')
else
# Get release notes from tag message
RELEASE_NOTES=$(git tag -l --format='%(contents)' "${TAG}")
if [[ -z "$RELEASE_NOTES" || "$RELEASE_NOTES" =~ ^[[:space:]]*$ ]]; then
if [[ "$IS_PRERELEASE" == "true" ]]; then
RELEASE_NOTES="Pre-release ${VERSION} (${RELEASE_TYPE})"
else
RELEASE_NOTES="Release ${VERSION}"
fi
fi

# Create release title
if [[ "$IS_PRERELEASE" == "true" ]]; then
TITLE="RustFS $VERSION (${RELEASE_TYPE})"
else
TITLE="RustFS $VERSION"
fi

# Create the release
PRERELEASE_FLAG=""
if [[ "$IS_PRERELEASE" == "true" ]]; then
PRERELEASE_FLAG="--prerelease"
fi

gh release create "$TAG" \
--title "$TITLE" \
--notes "$RELEASE_NOTES" \
$PRERELEASE_FLAG \
--draft

RELEASE_ID=$(gh release view "$TAG" --json databaseId --jq '.databaseId')
RELEASE_URL=$(gh release view "$TAG" --json url --jq '.url')
fi

echo "release_id=$RELEASE_ID" >> $GITHUB_OUTPUT
echo "release_url=$RELEASE_URL" >> $GITHUB_OUTPUT
echo "Created release: $RELEASE_URL"

# Prepare and upload release assets
upload-release-assets:
name: Upload Release Assets
needs: [build-check, build-rustfs, create-release]
if: startsWith(github.ref, 'refs/tags/') && needs.build-check.outputs.build_type != 'development'
runs-on: ubuntu-latest
permissions:
contents: write
actions: read
steps:
- name: Checkout repository
uses: actions/checkout@v4

- name: Download all build artifacts
uses: actions/download-artifact@v4
with:
path: ./artifacts
pattern: rustfs-*
merge-multiple: true

- name: Prepare release assets
id: prepare
run: |
VERSION="${{ needs.build-check.outputs.version }}"
TAG="${{ needs.build-check.outputs.version }}"

mkdir -p ./release-assets

# Copy and verify artifacts
ASSETS_COUNT=0
for file in ./artifacts/*.zip; do
if [[ -f "$file" ]]; then
cp "$file" ./release-assets/
ASSETS_COUNT=$((ASSETS_COUNT + 1))
fi
done

if [[ $ASSETS_COUNT -eq 0 ]]; then
echo "❌ No artifacts found!"
exit 1
fi

cd ./release-assets

# Generate checksums
if ls *.zip >/dev/null 2>&1; then
sha256sum *.zip > SHA256SUMS
sha512sum *.zip > SHA512SUMS
fi

# Create signature placeholder files
for file in *.zip; do
echo "# Signature for $file" > "${file}.asc"
echo "# GPG signature will be added in future versions" >> "${file}.asc"
done

echo "📦 Prepared assets:"
ls -la

echo "🔢 Asset count: $ASSETS_COUNT"

- name: Upload to GitHub Release
env:
GH_TOKEN: ${{ github.token }}
run: |
TAG="${{ needs.build-check.outputs.version }}"

cd ./release-assets

# Upload all files
for file in *; do
if [[ -f "$file" ]]; then
echo "📤 Uploading $file..."
gh release upload "$TAG" "$file" --clobber
fi
done

echo "✅ All assets uploaded successfully"

# Update latest.json for stable releases only
update-latest-version:
name: Update Latest Version
needs: [build-check, upload-release-assets]
if: startsWith(github.ref, 'refs/tags/')
runs-on: ubuntu-latest
steps:
- name: Update latest.json
env:
OSS_ACCESS_KEY_ID: ${{ secrets.ALICLOUDOSS_KEY_ID }}
OSS_ACCESS_KEY_SECRET: ${{ secrets.ALICLOUDOSS_KEY_SECRET }}
run: |
if [[ -z "$OSS_ACCESS_KEY_ID" ]]; then
echo "⚠️ OSS credentials not available, skipping latest.json update"
exit 0
fi

VERSION="${{ needs.build-check.outputs.version }}"
TAG="${{ needs.build-check.outputs.version }}"

# Install ossutil
OSSUTIL_VERSION="2.1.1"
OSSUTIL_ZIP="ossutil-${OSSUTIL_VERSION}-linux-amd64.zip"
OSSUTIL_DIR="ossutil-${OSSUTIL_VERSION}-linux-amd64"

curl -o "$OSSUTIL_ZIP" "https://gosspublic.alicdn.com/ossutil/v2/${OSSUTIL_VERSION}/${OSSUTIL_ZIP}"
unzip "$OSSUTIL_ZIP"
chmod +x "${OSSUTIL_DIR}/ossutil"

# Create latest.json
cat > latest.json << EOF
{
"version": "${VERSION}",
"tag": "${TAG}",
"release_date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
"release_type": "stable",
"download_url": "https://github.com/${{ github.repository }}/releases/tag/${TAG}"
}
EOF

# Upload to OSS
./${OSSUTIL_DIR}/ossutil cp latest.json oss://rustfs-version/latest.json --force

echo "✅ Updated latest.json for stable release $VERSION"
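A downstream consumer could read that manifest to discover the current stable version. This is a sketch only, since the public HTTP endpoint in front of the `rustfs-version` bucket is an assumption, not stated in this diff:

```bash
# Hypothetical public URL for the OSS bucket; substitute the real endpoint.
LATEST_URL="https://example-oss-endpoint.com/rustfs-version/latest.json"

# Print the stable version and where to download it.
curl -fsSL "$LATEST_URL" | jq -r '"\(.version) -> \(.download_url)"'
```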
# Publish release (remove draft status)
publish-release:
name: Publish Release
needs: [build-check, create-release, upload-release-assets]
if: startsWith(github.ref, 'refs/tags/') && needs.build-check.outputs.build_type != 'development'
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout repository
uses: actions/checkout@v4

- name: Update release notes and publish
env:
GH_TOKEN: ${{ github.token }}
run: |
TAG="${{ needs.build-check.outputs.version }}"
VERSION="${{ needs.build-check.outputs.version }}"
IS_PRERELEASE="${{ needs.build-check.outputs.is_prerelease }}"
BUILD_TYPE="${{ needs.build-check.outputs.build_type }}"

# Determine release type
if [[ "$BUILD_TYPE" == "prerelease" ]]; then
if [[ "$TAG" == *"alpha"* ]]; then
RELEASE_TYPE="alpha"
elif [[ "$TAG" == *"beta"* ]]; then
RELEASE_TYPE="beta"
elif [[ "$TAG" == *"rc"* ]]; then
RELEASE_TYPE="rc"
else
RELEASE_TYPE="prerelease"
fi
else
RELEASE_TYPE="release"
fi

# Get original release notes from tag
ORIGINAL_NOTES=$(git tag -l --format='%(contents)' "${TAG}")
if [[ -z "$ORIGINAL_NOTES" || "$ORIGINAL_NOTES" =~ ^[[:space:]]*$ ]]; then
if [[ "$IS_PRERELEASE" == "true" ]]; then
ORIGINAL_NOTES="Pre-release ${VERSION} (${RELEASE_TYPE})"
else
ORIGINAL_NOTES="Release ${VERSION}"
fi
fi

# Publish the release (remove draft status)
gh release edit "$TAG" --draft=false

echo "🎉 Released $TAG successfully!"
echo "📄 Release URL: ${{ needs.create-release.outputs.release_url }}"
408  .github/workflows/docker.yml  (vendored)
@@ -12,42 +12,33 @@
# See the License for the specific language governing permissions and
# limitations under the License.

# Docker Images Workflow
#
# This workflow builds Docker images using pre-built binaries from the build workflow.
#
# Trigger Types:
# 1. workflow_run: Automatically triggered when "Build and Release" workflow completes
# 2. workflow_dispatch: Manual trigger for standalone Docker builds
#
# Key Features:
# - Only triggers when Linux builds (x86_64 + aarch64) are successful
# - Independent of macOS/Windows build status
# - Uses workflow_run event for precise control
# - Only builds Docker images for releases and prereleases (development builds are skipped)

name: Docker Images

# Permissions needed for workflow_run event and Docker registry access
permissions:
contents: read
packages: write

on:
push:
tags: ["*.*.*"]
branches: [main]
paths-ignore:
- "**.md"
- "**.txt"
- ".github/**"
- "docs/**"
- "deploy/**"
- "scripts/dev_*.sh"
- "LICENSE*"
- "README*"
- "**/*.png"
- "**/*.jpg"
- "**/*.svg"
- ".gitignore"
- ".dockerignore"
pull_request:
branches: [main]
paths-ignore:
- "**.md"
- "**.txt"
- ".github/**"
- "docs/**"
- "deploy/**"
- "scripts/dev_*.sh"
- "LICENSE*"
- "README*"
- "**/*.png"
- "**/*.jpg"
- "**/*.svg"
- ".gitignore"
- ".dockerignore"
# Automatically triggered when build workflow completes
workflow_run:
workflows: ["Build and Release"]
types: [completed]
# Manual trigger with same parameters for consistency
workflow_dispatch:
inputs:
push_images:

@@ -56,9 +47,9 @@ on:
default: true
type: boolean
version:
description: "Version to build (latest, main-latest, dev-latest, or specific version like v1.0.0 or dev-abc123)"
description: "Version to build (latest for stable release, or specific version like v1.0.0, v1.0.0-alpha1)"
required: false
default: "main-latest"
default: "latest"
type: string
force_rebuild:
description: "Force rebuild even if binary exists (useful for testing)"
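Given those inputs, a standalone image build can be dispatched manually along these lines (a sketch; assumes an authenticated `gh` with access to rustfs/rustfs):

```bash
# Build and push production images for the latest stable release.
gh workflow run "Docker Images" --repo rustfs/rustfs -f version=latest -f push_images=true

# Rebuild a specific prerelease without pushing, e.g. to verify the workflow itself.
gh workflow run "Docker Images" --repo rustfs/rustfs \
  -f version=v1.0.0-alpha1 -f push_images=false -f force_rebuild=true
```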
@@ -67,12 +58,14 @@ on:
type: boolean

env:
DOCKERHUB_USERNAME: rustfs
CARGO_TERM_COLOR: always
REGISTRY_DOCKERHUB: rustfs/rustfs
REGISTRY_GHCR: ghcr.io/${{ github.repository }}
DOCKER_PLATFORMS: linux/amd64,linux/arm64

jobs:
# Docker build strategy check
# Check if we should build Docker images
build-check:
name: Docker Build Check
runs-on: ubuntu-latest

@@ -89,6 +82,8 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0
# For workflow_run events, checkout the specific commit that triggered the workflow
ref: ${{ github.event.workflow_run.head_sha || github.sha }}

- name: Check build conditions
id: check

@@ -101,26 +96,103 @@ jobs:
is_prerelease=false
create_latest=false

# Get short SHA for all builds
short_sha=$(git rev-parse --short HEAD)
if [[ "${{ github.event_name }}" == "workflow_run" ]]; then
# Triggered by build workflow completion
echo "🔗 Triggered by build workflow completion"

# Always build on workflow_dispatch or when changes detected
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]] || \
[[ "${{ github.event_name }}" == "push" ]] || \
[[ "${{ github.event_name }}" == "pull_request" ]]; then
should_build=true
fi
# Check if the triggering workflow was successful
# If the workflow succeeded, it means ALL builds (including Linux x86_64 and aarch64) succeeded
if [[ "${{ github.event.workflow_run.conclusion }}" == "success" ]]; then
echo "✅ Build workflow succeeded, all builds including Linux are successful"
should_build=true
should_push=true
else
echo "❌ Build workflow failed (conclusion: ${{ github.event.workflow_run.conclusion }}), skipping Docker build"
should_build=false
fi

# Determine build type and version
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]] && [[ -n "${{ github.event.inputs.version }}" ]]; then
# Manual trigger with version input
# Extract version info from commit message or use commit SHA
# Use Git to generate consistent short SHA (ensures uniqueness like build.yml)
short_sha=$(git rev-parse --short "${{ github.event.workflow_run.head_sha }}")

# Determine build type based on triggering workflow event and ref
triggering_event="${{ github.event.workflow_run.event }}"
head_branch="${{ github.event.workflow_run.head_branch }}"

echo "🔍 Analyzing triggering workflow:"
echo " 📋 Event: $triggering_event"
echo " 🌿 Head branch: $head_branch"
echo " 📎 Head SHA: ${{ github.event.workflow_run.head_sha }}"

# Check if this was triggered by a tag push
if [[ "$triggering_event" == "push" ]]; then
# For tag pushes, head_branch will be like "refs/tags/v1.0.0" or just "v1.0.0"
if [[ "$head_branch" == refs/tags/* ]]; then
# Extract tag name from refs/tags/TAG_NAME
tag_name="${head_branch#refs/tags/}"
version="$tag_name"
elif [[ "$head_branch" =~ ^v?[0-9]+\.[0-9]+\.[0-9]+ ]]; then
# Direct tag name like "v1.0.0" or "1.0.0-alpha.1"
version="$head_branch"
elif [[ "$head_branch" == "main" ]]; then
# Regular branch push to main
build_type="development"
version="dev-${short_sha}"
should_build=false
echo "⏭️ Skipping Docker build for development version (main branch push)"
else
# Other branch push
build_type="development"
version="dev-${short_sha}"
should_build=false
echo "⏭️ Skipping Docker build for development version (branch: $head_branch)"
fi

# If we extracted a version (tag), determine release type
if [[ -n "$version" ]] && [[ "$version" != "dev-${short_sha}" ]]; then
# Remove 'v' prefix if present for consistent version format
if [[ "$version" == v* ]]; then
version="${version#v}"
fi

if [[ "$version" == *"alpha"* ]] || [[ "$version" == *"beta"* ]] || [[ "$version" == *"rc"* ]]; then
build_type="prerelease"
is_prerelease=true
echo "🧪 Building Docker image for prerelease: $version"
else
build_type="release"
create_latest=true
echo "🚀 Building Docker image for release: $version"
fi
fi
else
# Non-push events
build_type="development"
version="dev-${short_sha}"
should_build=false
echo "⏭️ Skipping Docker build for development version (event: $triggering_event)"
fi

echo "🔄 Build triggered by workflow_run:"
echo " 📋 Conclusion: ${{ github.event.workflow_run.conclusion }}"
echo " 🌿 Branch: ${{ github.event.workflow_run.head_branch }}"
echo " 📎 SHA: ${{ github.event.workflow_run.head_sha }}"
echo " 🎯 Event: ${{ github.event.workflow_run.event }}"

elif [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
# Manual trigger
input_version="${{ github.event.inputs.version }}"
version="${input_version}"
force_rebuild="${{ github.event.inputs.force_rebuild }}"
should_push="${{ github.event.inputs.push_images }}"
should_build=true

# Get short SHA
short_sha=$(git rev-parse --short HEAD)

echo "🎯 Manual Docker build triggered:"
echo " 📋 Requested version: $input_version"
echo " 🔧 Force rebuild: $force_rebuild"
echo " 🔧 Force rebuild: ${{ github.event.inputs.force_rebuild }}"
echo " 🚀 Push images: $should_push"

case "$input_version" in
"latest")

@@ -128,68 +200,25 @@ jobs:
create_latest=true
echo "🚀 Building with latest stable release version"
;;
"main-latest")
build_type="development"
version="main-latest"
echo "🛠️ Building with main branch latest development version"
;;
"dev-latest")
build_type="development"
version="dev-latest"
echo "🛠️ Building with development latest version"
;;
v[0-9]*)
build_type="release"
create_latest=true
echo "📦 Building with specific release version: $input_version"
;;
v*alpha*|v*beta*|v*rc*)
# Prerelease versions (must match first, more specific)
v*alpha*|v*beta*|v*rc*|*alpha*|*beta*|*rc*)
build_type="prerelease"
is_prerelease=true
echo "🧪 Building with prerelease version: $input_version"
;;
dev-[a-f0-9]*)
build_type="development"
echo "🔧 Building with specific development version: $input_version"
# Release versions (match after prereleases, more general)
v[0-9]*|[0-9]*.*.*)
build_type="release"
create_latest=true
echo "📦 Building with specific release version: $input_version"
;;
*)
build_type="development"
echo "🔧 Building with custom version: $input_version"
echo "⚠️ Warning: Custom version format may not follow standard patterns"
# Invalid version for Docker build
should_build=false
echo "❌ Invalid version for Docker build: $input_version"
echo "⚠️ Only release versions (latest, v1.0.0, 1.0.0) and prereleases (v1.0.0-alpha1, 1.0.0-beta2) are supported"
;;
esac
elif [[ "${{ startsWith(github.ref, 'refs/tags/') }}" == "true" ]]; then
# Tag push - release or prerelease
tag_name="${GITHUB_REF#refs/tags/}"
version="${tag_name}"

# Check if this is a prerelease
if [[ "$tag_name" == *"alpha"* ]] || [[ "$tag_name" == *"beta"* ]] || [[ "$tag_name" == *"rc"* ]]; then
build_type="prerelease"
is_prerelease=true
echo "🚀 Docker prerelease build detected: $tag_name"
else
build_type="release"
create_latest=true
echo "📦 Docker release build detected: $tag_name"
fi
elif [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
# Main branch push - development build
build_type="development"
version="dev-${short_sha}"
echo "🛠️ Docker development build detected"
else
# Other branches - development build
build_type="development"
version="dev-${short_sha}"
echo "🔧 Docker development build detected"
fi

# Push only on main branch, tags, or manual trigger
if [[ "${{ github.ref }}" == "refs/heads/main" ]] || \
[[ "${{ startsWith(github.ref, 'refs/tags/') }}" == "true" ]] || \
[[ "${{ github.event.inputs.push_images }}" == "true" ]]; then
should_push=true
fi

echo "should_build=$should_build" >> $GITHUB_OUTPUT

@@ -210,25 +239,15 @@ jobs:
echo " - Create latest: $create_latest"

# Build multi-arch Docker images
# Strategy: Build images using pre-built binaries from dl.rustfs.com
# Supports both release and dev channel binaries based on build context
# Only runs when should_build is true (which includes workflow success check)
build-docker:
name: Build Docker Images
needs: build-check
if: needs.build-check.outputs.should_build == 'true'
runs-on: ubuntu-latest
timeout-minutes: 60
strategy:
fail-fast: false
matrix:
variant:
- name: production
dockerfile: Dockerfile
platforms: linux/amd64,linux/arm64
- name: source
dockerfile: Dockerfile.source
platforms: linux/amd64,linux/arm64
- name: dev
dockerfile: Dockerfile.source
platforms: linux/amd64,linux/arm64
steps:
- name: Checkout repository
uses: actions/checkout@v4

@@ -236,9 +255,8 @@ jobs:
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
username: ${{ env.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
scopes: repository:rustfs/rustfs:pull,push

# - name: Login to GitHub Container Registry
# uses: docker/login-action@v3

@@ -260,57 +278,54 @@ jobs:
VERSION="${{ needs.build-check.outputs.version }}"
SHORT_SHA="${{ needs.build-check.outputs.short_sha }}"
CREATE_LATEST="${{ needs.build-check.outputs.create_latest }}"
VARIANT="${{ matrix.variant.name }}"

# Convert version format for Dockerfile compatibility
case "$VERSION" in
"latest")
# For stable latest, use RELEASE=latest + release CHANNEL
DOCKER_RELEASE="latest"
DOCKER_CHANNEL="release"
;;
v*)
# For versioned releases (v1.0.0), remove 'v' prefix for Dockerfile
DOCKER_RELEASE="${VERSION#v}"
DOCKER_CHANNEL="release"
;;
*)
# For other versions, pass as-is
DOCKER_RELEASE="${VERSION}"
DOCKER_CHANNEL="release"
;;
esac

echo "docker_release=$DOCKER_RELEASE" >> $GITHUB_OUTPUT
echo "docker_channel=$DOCKER_CHANNEL" >> $GITHUB_OUTPUT

echo "🐳 Docker build parameters:"
echo " - Original version: $VERSION"
echo " - Docker RELEASE: $DOCKER_RELEASE"
echo " - Docker CHANNEL: $DOCKER_CHANNEL"

# Generate tags based on build type
TAGS=""
# Only support release and prerelease builds (no development builds)
TAGS="${{ env.REGISTRY_DOCKERHUB }}:${VERSION}"

if [[ "$BUILD_TYPE" == "development" ]]; then
# Development build: dev-${short_sha}-${variant} and dev-${variant}
TAGS="${{ env.REGISTRY_DOCKERHUB }}:dev-${SHORT_SHA}-${VARIANT}"

# Add rolling dev tag for each variant
TAGS="$TAGS,${{ env.REGISTRY_DOCKERHUB }}:dev-${VARIANT}"

# Special handling for production variant
if [[ "$VARIANT" == "production" ]]; then
TAGS="$TAGS,${{ env.REGISTRY_DOCKERHUB }}:dev-${SHORT_SHA}"
TAGS="$TAGS,${{ env.REGISTRY_DOCKERHUB }}:dev"
fi
else
# Release/Prerelease build: ${version}-${variant}
TAGS="${{ env.REGISTRY_DOCKERHUB }}:${VERSION}-${VARIANT}"

# Special handling for production variant - create main version tag
if [[ "$VARIANT" == "production" ]]; then
TAGS="$TAGS,${{ env.REGISTRY_DOCKERHUB }}:${VERSION}"
# Add channel tags for prereleases and latest for stable
if [[ "$CREATE_LATEST" == "true" ]]; then
# Stable release
TAGS="$TAGS,${{ env.REGISTRY_DOCKERHUB }}:latest"
elif [[ "$BUILD_TYPE" == "prerelease" ]]; then
# Prerelease channel tags (alpha, beta, rc)
if [[ "$VERSION" == *"alpha"* ]]; then
CHANNEL="alpha"
elif [[ "$VERSION" == *"beta"* ]]; then
CHANNEL="beta"
elif [[ "$VERSION" == *"rc"* ]]; then
CHANNEL="rc"
fi

# Add channel tags for prereleases and latest for stable
if [[ "$CREATE_LATEST" == "true" ]]; then
# Stable release
if [[ "$VARIANT" == "production" ]]; then
TAGS="$TAGS,${{ env.REGISTRY_DOCKERHUB }}:latest"
else
TAGS="$TAGS,${{ env.REGISTRY_DOCKERHUB }}:latest-${VARIANT}"
fi
elif [[ "$BUILD_TYPE" == "prerelease" ]]; then
# Prerelease channel tags (alpha, beta, rc)
if [[ "$VERSION" == *"alpha"* ]]; then
CHANNEL="alpha"
elif [[ "$VERSION" == *"beta"* ]]; then
CHANNEL="beta"
elif [[ "$VERSION" == *"rc"* ]]; then
CHANNEL="rc"
fi

if [[ -n "$CHANNEL" ]]; then
if [[ "$VARIANT" == "production" ]]; then
TAGS="$TAGS,${{ env.REGISTRY_DOCKERHUB }}:${CHANNEL}"
else
TAGS="$TAGS,${{ env.REGISTRY_DOCKERHUB }}:${CHANNEL}-${VARIANT}"
fi
fi
if [[ -n "$CHANNEL" ]]; then
TAGS="$TAGS,${{ env.REGISTRY_DOCKERHUB }}:${CHANNEL}"
fi
fi
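Under the new single-variant scheme, the tag set reduces to the version tag plus one rolling tag: `latest` for stable releases, or the `alpha`/`beta`/`rc` channel tag for prereleases. An illustrative sketch of that mapping (not part of the workflow):

```bash
# Illustrative only: reproduce the tag set the step above would emit.
version_tags() {
  local v="$1" tags="rustfs/rustfs:${v}"
  case "$v" in
    *alpha*) tags="$tags,rustfs/rustfs:alpha" ;;
    *beta*)  tags="$tags,rustfs/rustfs:beta" ;;
    *rc*)    tags="$tags,rustfs/rustfs:rc" ;;
    *)       tags="$tags,rustfs/rustfs:latest" ;;
  esac
  echo "$tags"
}

version_tags "1.0.0"          # rustfs/rustfs:1.0.0,rustfs/rustfs:latest
version_tags "1.0.0-alpha1"   # rustfs/rustfs:1.0.0-alpha1,rustfs/rustfs:alpha
```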
@@ -324,7 +339,6 @@ jobs:
LABELS="$LABELS,org.opencontainers.image.revision=${{ github.sha }}"
LABELS="$LABELS,org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }}"
LABELS="$LABELS,org.opencontainers.image.created=$(date -u +'%Y-%m-%dT%H:%M:%SZ')"
LABELS="$LABELS,org.opencontainers.image.variant=$VARIANT"
LABELS="$LABELS,org.opencontainers.image.build-type=$BUILD_TYPE"

echo "labels=$LABELS" >> $GITHUB_OUTPUT

@@ -338,20 +352,22 @@ jobs:
uses: docker/build-push-action@v6
with:
context: .
file: ${{ matrix.variant.dockerfile }}
platforms: ${{ matrix.variant.platforms }}
file: Dockerfile
platforms: ${{ env.DOCKER_PLATFORMS }}
push: ${{ needs.build-check.outputs.should_push == 'true' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: |
type=gha,scope=docker-${{ matrix.variant.name }}
type=gha,scope=docker-binary
cache-to: |
type=gha,mode=max,scope=docker-${{ matrix.variant.name }}
type=gha,mode=max,scope=docker-binary
build-args: |
BUILDTIME=$(date -u +'%Y-%m-%dT%H:%M:%SZ')
VERSION=${{ needs.build-check.outputs.version }}
BUILD_TYPE=${{ needs.build-check.outputs.build_type }}
REVISION=${{ github.sha }}
RELEASE=${{ steps.meta.outputs.docker_release }}
CHANNEL=${{ steps.meta.outputs.docker_channel }}
BUILDKIT_INLINE_CACHE=1
# Enable advanced BuildKit features for better performance
provenance: false
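Because the action pushes a multi-arch manifest covering `linux/amd64` and `linux/arm64`, the same tag resolves to the right architecture on pull. For example, once the image is public on Docker Hub:

```bash
# Pull resolves to linux/amd64 or linux/arm64 automatically via the manifest list.
docker pull rustfs/rustfs:latest

# Inspect which platforms a tag actually carries.
docker buildx imagetools inspect rustfs/rustfs:latest
```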
@@ -360,38 +376,8 @@ jobs:
no-cache: false
pull: true

# Create manifest for main production image (only for stable releases)
create-manifest:
name: Create Manifest
needs: [build-check, build-docker]
if: needs.build-check.outputs.should_push == 'true' && needs.build-check.outputs.create_latest == 'true' && needs.build-check.outputs.build_type == 'release'
runs-on: ubuntu-latest
steps:
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

# - name: Login to GitHub Container Registry
# uses: docker/login-action@v3
# with:
# registry: ghcr.io
# username: ${{ github.actor }}
# password: ${{ secrets.GITHUB_TOKEN }}

- name: Create and push manifest
run: |
VERSION="${{ needs.build-check.outputs.version }}"

echo "🐳 Creating manifest for stable release: $VERSION"

# Create main image tag (without variant suffix) for stable releases only
# Note: The "production" variant already creates the main tags without suffix
echo "Manifest creation is handled by the production variant build step"
echo "Main tags ${VERSION} and latest are created directly by the production variant"

echo "✅ Manifest created successfully for stable release"
# Note: Manifest creation is no longer needed as we only build one variant
# Multi-arch manifests are automatically created by docker/build-push-action

# Docker build summary
docker-summary:

@@ -409,23 +395,23 @@ jobs:
echo "🐳 Docker build completed successfully!"
echo "📦 Build type: $BUILD_TYPE"
echo "🔢 Version: $VERSION"
echo "🚀 Strategy: Images using pre-built binaries (release channel only)"
echo ""

case "$BUILD_TYPE" in
"development")
echo "🛠️ Development Docker images have been built with dev-${VERSION} tags"
echo "⚠️ These are development images - not suitable for production use"
;;
"release")
echo "🚀 Release Docker images have been built with v${VERSION} tags"
echo "✅ These images are ready for production use"
echo "🚀 Release Docker image has been built with ${VERSION} tags"
echo "✅ This image is ready for production use"
if [[ "$CREATE_LATEST" == "true" ]]; then
echo "🏷️ Latest tags have been created for stable release"
echo "🏷️ Latest tag has been created for stable release"
fi
;;
"prerelease")
echo "🧪 Prerelease Docker images have been built with v${VERSION} tags"
echo "⚠️ These are prerelease images - use with caution"
echo "🚫 Latest tags NOT created for prerelease"
echo "🧪 Prerelease Docker image has been built with ${VERSION} tags"
echo "⚠️ This is a prerelease image - use with caution"
echo "🚫 Latest tag NOT created for prerelease"
;;
*)
echo "❌ Unexpected build type: $BUILD_TYPE"
;;
esac
78  .github/workflows/release-notes-template.md  (vendored)
@@ -1,78 +0,0 @@
## RustFS ${VERSION_CLEAN}

${ORIGINAL_NOTES}

---

### 🚀 Quick Download

**Linux (Static Binaries - No Dependencies):**

```bash
# x86_64 (Intel/AMD)
curl -LO https://github.com/rustfs/rustfs/releases/download/${VERSION}/rustfs-x86_64-unknown-linux-musl.zip
unzip rustfs-x86_64-unknown-linux-musl.zip
sudo mv rustfs /usr/local/bin/

# ARM64 (Graviton, Apple Silicon VMs)
curl -LO https://github.com/rustfs/rustfs/releases/download/${VERSION}/rustfs-aarch64-unknown-linux-musl.zip
unzip rustfs-aarch64-unknown-linux-musl.zip
sudo mv rustfs /usr/local/bin/
```

**macOS:**

```bash
# Apple Silicon (M1/M2/M3)
curl -LO https://github.com/rustfs/rustfs/releases/download/${VERSION}/rustfs-aarch64-apple-darwin.zip
unzip rustfs-aarch64-apple-darwin.zip
sudo mv rustfs /usr/local/bin/

# Intel
curl -LO https://github.com/rustfs/rustfs/releases/download/${VERSION}/rustfs-x86_64-apple-darwin.zip
unzip rustfs-x86_64-apple-darwin.zip
sudo mv rustfs /usr/local/bin/
```

### 📁 Available Downloads

| Platform | Architecture | File | Description |
|----------|-------------|------|-------------|
| Linux | x86_64 | `rustfs-x86_64-unknown-linux-musl.zip` | Static binary, no dependencies |
| Linux | ARM64 | `rustfs-aarch64-unknown-linux-musl.zip` | Static binary, no dependencies |
| macOS | Apple Silicon | `rustfs-aarch64-apple-darwin.zip` | Native binary, ZIP archive |
| macOS | Intel | `rustfs-x86_64-apple-darwin.zip` | Native binary, ZIP archive |

### 🔐 Verification

Download checksums and verify your download:

```bash
# Download checksums
curl -LO https://github.com/rustfs/rustfs/releases/download/${VERSION}/SHA256SUMS

# Verify (Linux)
sha256sum -c SHA256SUMS --ignore-missing

# Verify (macOS)
shasum -a 256 -c SHA256SUMS --ignore-missing
```

### 🛠️ System Requirements

- **Linux**: Any distribution with glibc 2.17+ (CentOS 7+, Ubuntu 16.04+)
- **macOS**: 10.15+ (Catalina or later)
- **Windows**: Windows 10 version 1809 or later

### 📚 Documentation

- [Installation Guide](https://github.com/rustfs/rustfs#installation)
- [Quick Start](https://github.com/rustfs/rustfs#quick-start)
- [Configuration](https://github.com/rustfs/rustfs/blob/main/docs/)
- [API Documentation](https://docs.rs/rustfs)

### 🆘 Support

- 🐛 [Report Issues](https://github.com/rustfs/rustfs/issues)
- 💬 [Community Discussions](https://github.com/rustfs/rustfs/discussions)
- 📖 [Documentation](https://github.com/rustfs/rustfs/tree/main/docs)
353  .github/workflows/release.yml  (vendored)
@@ -1,353 +0,0 @@
# Copyright 2024 RustFS Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: Release

on:
push:
tags: ["*.*.*"]
workflow_dispatch:
inputs:
tag:
description: "Tag to create release for"
required: true
type: string

env:
CARGO_TERM_COLOR: always

jobs:
# Determine release type
release-check:
name: Release Type Check
runs-on: ubuntu-latest
outputs:
tag: ${{ steps.check.outputs.tag }}
version: ${{ steps.check.outputs.version }}
is_prerelease: ${{ steps.check.outputs.is_prerelease }}
release_type: ${{ steps.check.outputs.release_type }}
steps:
- name: Determine release type
id: check
run: |
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
TAG="${{ github.event.inputs.tag }}"
else
TAG="${GITHUB_REF#refs/tags/}"
fi

VERSION="${TAG}"

# Check if this is a prerelease
IS_PRERELEASE=false
RELEASE_TYPE="release"

if [[ "$TAG" == *"alpha"* ]] || [[ "$TAG" == *"beta"* ]] || [[ "$TAG" == *"rc"* ]]; then
IS_PRERELEASE=true
if [[ "$TAG" == *"alpha"* ]]; then
RELEASE_TYPE="alpha"
elif [[ "$TAG" == *"beta"* ]]; then
RELEASE_TYPE="beta"
elif [[ "$TAG" == *"rc"* ]]; then
RELEASE_TYPE="rc"
fi
fi

echo "tag=$TAG" >> $GITHUB_OUTPUT
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "is_prerelease=$IS_PRERELEASE" >> $GITHUB_OUTPUT
echo "release_type=$RELEASE_TYPE" >> $GITHUB_OUTPUT

echo "📦 Release Type: $RELEASE_TYPE"
echo "🏷️ Tag: $TAG"
echo "🔢 Version: $VERSION"
echo "🚀 Is Prerelease: $IS_PRERELEASE"

# Create GitHub Release
create-release:
name: Create GitHub Release
needs: release-check
runs-on: ubuntu-latest
permissions:
contents: write
outputs:
release_id: ${{ steps.create.outputs.release_id }}
release_url: ${{ steps.create.outputs.release_url }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Create GitHub Release
id: create
env:
GH_TOKEN: ${{ github.token }}
run: |
TAG="${{ needs.release-check.outputs.tag }}"
VERSION="${{ needs.release-check.outputs.version }}"
IS_PRERELEASE="${{ needs.release-check.outputs.is_prerelease }}"
RELEASE_TYPE="${{ needs.release-check.outputs.release_type }}"

# Check if release already exists
if gh release view "$TAG" >/dev/null 2>&1; then
echo "Release $TAG already exists"
RELEASE_ID=$(gh release view "$TAG" --json databaseId --jq '.databaseId')
RELEASE_URL=$(gh release view "$TAG" --json url --jq '.url')
else
# Get release notes from tag message
RELEASE_NOTES=$(git tag -l --format='%(contents)' "${TAG}")
if [[ -z "$RELEASE_NOTES" || "$RELEASE_NOTES" =~ ^[[:space:]]*$ ]]; then
if [[ "$IS_PRERELEASE" == "true" ]]; then
RELEASE_NOTES="Pre-release ${VERSION} (${RELEASE_TYPE})"
else
RELEASE_NOTES="Release ${VERSION}"
fi
fi

# Create release title
if [[ "$IS_PRERELEASE" == "true" ]]; then
TITLE="RustFS $VERSION (${RELEASE_TYPE})"
else
TITLE="RustFS $VERSION"
fi

# Create the release
PRERELEASE_FLAG=""
if [[ "$IS_PRERELEASE" == "true" ]]; then
PRERELEASE_FLAG="--prerelease"
fi

gh release create "$TAG" \
--title "$TITLE" \
--notes "$RELEASE_NOTES" \
$PRERELEASE_FLAG \
--draft

RELEASE_ID=$(gh release view "$TAG" --json databaseId --jq '.databaseId')
RELEASE_URL=$(gh release view "$TAG" --json url --jq '.url')
fi

echo "release_id=$RELEASE_ID" >> $GITHUB_OUTPUT
echo "release_url=$RELEASE_URL" >> $GITHUB_OUTPUT
echo "Created release: $RELEASE_URL"

# Wait for build artifacts from build.yml
wait-for-artifacts:
name: Wait for Build Artifacts
needs: release-check
runs-on: ubuntu-latest
steps:
- name: Wait for build workflow
uses: lewagon/wait-on-check-action@v1.3.1
with:
ref: ${{ needs.release-check.outputs.tag }}
check-name: "Build RustFS"
repo-token: ${{ secrets.GITHUB_TOKEN }}
wait-interval: 30
allowed-conclusions: success

# Download and prepare release assets
prepare-assets:
name: Prepare Release Assets
needs: [release-check, wait-for-artifacts]
runs-on: ubuntu-latest
outputs:
assets_prepared: ${{ steps.prepare.outputs.assets_prepared }}
steps:
- name: Checkout repository
uses: actions/checkout@v4

- name: Download artifacts from build workflow
uses: actions/download-artifact@v4
with:
path: ./artifacts
pattern: rustfs-*
merge-multiple: true

- name: Prepare release assets
id: prepare
run: |
VERSION="${{ needs.release-check.outputs.version }}"
TAG="${{ needs.release-check.outputs.tag }}"

mkdir -p ./release-assets

# Copy and verify artifacts
ASSETS_COUNT=0
for file in ./artifacts/rustfs-*.zip; do
if [[ -f "$file" ]]; then
cp "$file" ./release-assets/
ASSETS_COUNT=$((ASSETS_COUNT + 1))
fi
done

if [[ $ASSETS_COUNT -eq 0 ]]; then
echo "❌ No artifacts found!"
exit 1
fi

cd ./release-assets

# Generate checksums
if ls *.zip >/dev/null 2>&1; then
sha256sum *.zip > SHA256SUMS
sha512sum *.zip > SHA512SUMS
fi

# TODO: Add GPG signing for signatures
# For now, create placeholder signature files
for file in *.zip; do
echo "# Signature for $file" > "${file}.asc"
echo "# GPG signature will be added in future versions" >> "${file}.asc"
done

echo "assets_prepared=true" >> $GITHUB_OUTPUT

echo "📦 Prepared assets:"
ls -la

echo "🔢 Asset count: $ASSETS_COUNT"

- name: Upload prepared assets
uses: actions/upload-artifact@v4
with:
name: release-assets-${{ needs.release-check.outputs.tag }}
path: ./release-assets/
retention-days: 30

# Upload assets to GitHub Release
upload-assets:
name: Upload Release Assets
needs: [release-check, create-release, prepare-assets]
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Download prepared assets
uses: actions/download-artifact@v4
with:
name: release-assets-${{ needs.release-check.outputs.tag }}
path: ./release-assets

- name: Upload to GitHub Release
env:
GH_TOKEN: ${{ github.token }}
run: |
TAG="${{ needs.release-check.outputs.tag }}"

cd ./release-assets

# Upload all files
for file in *; do
if [[ -f "$file" ]]; then
echo "📤 Uploading $file..."
gh release upload "$TAG" "$file" --clobber
fi
done

echo "✅ All assets uploaded successfully"

# Update latest.json for stable releases only
update-latest:
name: Update Latest Version
needs: [release-check, upload-assets]
if: needs.release-check.outputs.is_prerelease == 'false'
runs-on: ubuntu-latest
steps:
- name: Update latest.json
env:
OSS_ACCESS_KEY_ID: ${{ secrets.ALICLOUDOSS_KEY_ID }}
OSS_ACCESS_KEY_SECRET: ${{ secrets.ALICLOUDOSS_KEY_SECRET }}
run: |
if [[ -z "$OSS_ACCESS_KEY_ID" ]]; then
echo "⚠️ OSS credentials not available, skipping latest.json update"
exit 0
fi

VERSION="${{ needs.release-check.outputs.version }}"
TAG="${{ needs.release-check.outputs.tag }}"

# Install ossutil
OSSUTIL_VERSION="2.1.1"
OSSUTIL_ZIP="ossutil-${OSSUTIL_VERSION}-linux-amd64.zip"
OSSUTIL_DIR="ossutil-${OSSUTIL_VERSION}-linux-amd64"

curl -o "$OSSUTIL_ZIP" "https://gosspublic.alicdn.com/ossutil/v2/${OSSUTIL_VERSION}/${OSSUTIL_ZIP}"
unzip "$OSSUTIL_ZIP"
chmod +x "${OSSUTIL_DIR}/ossutil"

# Create latest.json
cat > latest.json << EOF
{
"version": "${VERSION}",
"tag": "${TAG}",
"release_date": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
"release_type": "stable",
"download_url": "https://github.com/${{ github.repository }}/releases/tag/${TAG}"
}
EOF

# Upload to OSS
./${OSSUTIL_DIR}/ossutil cp latest.json oss://rustfs-version/latest.json --force

echo "✅ Updated latest.json for stable release $VERSION"

# Publish release (remove draft status)
publish-release:
name: Publish Release
needs: [release-check, create-release, upload-assets]
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout repository
uses: actions/checkout@v4

- name: Update release notes and publish
env:
GH_TOKEN: ${{ github.token }}
run: |
TAG="${{ needs.release-check.outputs.tag }}"
VERSION="${{ needs.release-check.outputs.version }}"
IS_PRERELEASE="${{ needs.release-check.outputs.is_prerelease }}"
RELEASE_TYPE="${{ needs.release-check.outputs.release_type }}"

# Get original release notes from tag
ORIGINAL_NOTES=$(git tag -l --format='%(contents)' "${TAG}")
if [[ -z "$ORIGINAL_NOTES" || "$ORIGINAL_NOTES" =~ ^[[:space:]]*$ ]]; then
if [[ "$IS_PRERELEASE" == "true" ]]; then
ORIGINAL_NOTES="Pre-release ${VERSION} (${RELEASE_TYPE})"
else
ORIGINAL_NOTES="Release ${VERSION}"
fi
fi

# Use release notes template if available
if [[ -f ".github/workflows/release-notes-template.md" ]]; then
# Substitute variables in template
sed -e "s/\${VERSION}/$TAG/g" \
-e "s/\${VERSION_CLEAN}/$VERSION/g" \
-e "s/\${ORIGINAL_NOTES}/$(echo "$ORIGINAL_NOTES" | sed 's/[[\.*^$()+?{|]/\\&/g')/g" \
.github/workflows/release-notes-template.md > enhanced_notes.md

# Update release notes
gh release edit "$TAG" --notes-file enhanced_notes.md
fi

# Publish the release (remove draft status)
gh release edit "$TAG" --draft=false

echo "🎉 Released $TAG successfully!"
echo "📄 Release URL: ${{ needs.create-release.outputs.release_url }}"
247  Cargo.lock  (generated)
@@ -455,7 +455,10 @@ dependencies = [
"serde_repr",
"tokio",
"url",
"zbus 5.8.0",
"wayland-backend",
"wayland-client",
"wayland-protocols",
"zbus 5.9.0",
]

[[package]]

@@ -512,7 +515,7 @@ dependencies = [
"futures-lite",
"parking",
"polling",
"rustix 1.0.7",
"rustix 1.0.8",
"slab",
"tracing",
"windows-sys 0.59.0",

@@ -544,7 +547,7 @@ dependencies = [
"cfg-if",
"event-listener",
"futures-lite",
"rustix 1.0.7",
"rustix 1.0.8",
"tracing",
]

@@ -571,7 +574,7 @@ dependencies = [
"cfg-if",
"futures-core",
"futures-io",
"rustix 1.0.7",
"rustix 1.0.8",
"signal-hook-registry",
"slab",
"windows-sys 0.59.0",

@@ -673,9 +676,9 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"

[[package]]
name = "aws-credential-types"
version = "1.2.3"
version = "1.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "687bc16bc431a8533fe0097c7f0182874767f920989d7260950172ae8e3c4465"
checksum = "b68c2194a190e1efc999612792e25b1ab3abfefe4306494efaaabc25933c0cbe"
dependencies = [
"aws-smithy-async",
"aws-smithy-runtime-api",

@@ -708,9 +711,9 @@ dependencies = [

[[package]]
name = "aws-runtime"
version = "1.5.8"
version = "1.5.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f6c68419d8ba16d9a7463671593c54f81ba58cab466e9b759418da606dcc2e2"
checksum = "b2090e664216c78e766b6bac10fe74d2f451c02441d43484cd76ac9a295075f7"
dependencies = [
"aws-credential-types",
"aws-sigv4",

@@ -733,9 +736,9 @@ dependencies = [

[[package]]
name = "aws-sdk-s3"
version = "1.96.0"
version = "1.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e25d24de44b34dcdd5182ac4e4c6f07bcec2661c505acef94c0d293b65505fe"
checksum = "029e89cae7e628553643aecb3a3f054a0a0912ff0fd1f5d6a0b4fda421dce64b"
dependencies = [
"aws-credential-types",
"aws-runtime",

@@ -806,9 +809,9 @@ dependencies = [

[[package]]
name = "aws-smithy-checksums"
version = "0.63.4"
version = "0.63.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "244f00666380d35c1c76b90f7b88a11935d11b84076ac22a4c014ea0939627af"
checksum = "5ab9472f7a8ec259ddb5681d2ef1cb1cf16c0411890063e67cdc7b62562cc496"
dependencies = [
"aws-smithy-http",
"aws-smithy-types",

@@ -826,9 +829,9 @@ dependencies = [

[[package]]
name = "aws-smithy-eventstream"
version = "0.60.9"
version = "0.60.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "338a3642c399c0a5d157648426110e199ca7fd1c689cc395676b81aa563700c4"
checksum = "604c7aec361252b8f1c871a7641d5e0ba3a7f5a586e51b66bc9510a5519594d9"
dependencies = [
"aws-smithy-types",
"bytes",

@@ -837,9 +840,9 @@ dependencies = [

[[package]]
name = "aws-smithy-http"
version = "0.62.1"
version = "0.62.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99335bec6cdc50a346fda1437f9fefe33abf8c99060739a546a16457f2862ca9"
checksum = "43c82ba4cab184ea61f6edaafc1072aad3c2a17dcf4c0fce19ac5694b90d8b5f"
dependencies = [
"aws-smithy-eventstream",
"aws-smithy-runtime-api",

@@ -865,7 +868,7 @@ dependencies = [
"aws-smithy-async",
"aws-smithy-runtime-api",
"aws-smithy-types",
"h2 0.3.26",
"h2 0.3.27",
"h2 0.4.11",
"http 0.2.12",
"http 1.3.1",

@@ -1574,23 +1577,12 @@ dependencies = [

[[package]]
name = "chrono-tz"
version = "0.10.3"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "efdce149c370f133a071ca8ef6ea340b7b88748ab0810097a9e2976eaa34b4f3"
checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3"
dependencies = [
"chrono",
"chrono-tz-build",
"phf 0.11.3",
]

[[package]]
name = "chrono-tz-build"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f10f8c9340e31fc120ff885fcdb54a0b48e474bbd77cab557f0c30a3e569402"
dependencies = [
"parse-zoneinfo",
"phf_codegen 0.11.3",
"phf 0.12.1",
]

[[package]]

@@ -1850,18 +1842,18 @@ dependencies = [

[[package]]
name = "const-str"
version = "0.6.2"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e991226a70654b49d34de5ed064885f0bef0348a8e70018b8ff1ac80aa984a2"
checksum = "041fbfcf8e7054df725fb9985297e92422cdc80fcf313665f5ca3d761bb63f4c"
dependencies = [
"const-str-proc-macro",
]

[[package]]
name = "const-str-proc-macro"
version = "0.6.2"
|
||||
version = "0.6.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f0d1c4c3cb85e5856b34e829af0035d7154f8c2889b15bbf43c8a6c6786dcab5"
|
||||
checksum = "f801882b7ecd4188f4bca0317f34e022d623590d85893d7024b18d14f2a3b40b"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2031,9 +2023,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "crc32fast"
|
||||
version = "1.4.2"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3"
|
||||
checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
@@ -3384,18 +3376,6 @@ version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b"
|
||||
|
||||
[[package]]
|
||||
name = "dispatch2"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1a0d569e003ff27784e0e14e4a594048698e0c0f0b66cabcb51511be55a7caa0"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"block2 0.6.1",
|
||||
"libc",
|
||||
"objc2 0.6.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dispatch2"
|
||||
version = "0.3.0"
|
||||
@@ -3403,6 +3383,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"block2 0.6.1",
|
||||
"libc",
|
||||
"objc2 0.6.1",
|
||||
]
|
||||
|
||||
@@ -3417,6 +3399,15 @@ dependencies = [
|
||||
"syn 2.0.104",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dlib"
|
||||
version = "0.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "330c60081dcc4c72131f8eb70510f1ac07223e5d4163db481a04a0befcffa412"
|
||||
dependencies = [
|
||||
"libloading 0.8.8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dlopen2"
|
||||
version = "0.7.0"
|
||||
@@ -3440,6 +3431,12 @@ dependencies = [
|
||||
"syn 2.0.104",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "downcast-rs"
|
||||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2"
|
||||
|
||||
[[package]]
|
||||
name = "dpi"
|
||||
version = "0.1.2"
|
||||
@@ -4375,9 +4372,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "h2"
|
||||
version = "0.3.26"
|
||||
version = "0.3.27"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8"
|
||||
checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"fnv",
|
||||
@@ -4623,7 +4620,7 @@ dependencies = [
|
||||
"futures-channel",
|
||||
"futures-core",
|
||||
"futures-util",
|
||||
"h2 0.3.26",
|
||||
"h2 0.3.27",
|
||||
"http 0.2.12",
|
||||
"http-body 0.4.6",
|
||||
"httparse",
|
||||
@@ -6212,7 +6209,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1c10c2894a6fed806ade6027bcd50662746363a9589d3ec9d9bef30a4e4bc166"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"dispatch2 0.3.0",
|
||||
"dispatch2",
|
||||
"objc2 0.6.1",
|
||||
]
|
||||
|
||||
@@ -6611,15 +6608,6 @@ dependencies = [
|
||||
"zstd",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parse-zoneinfo"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1f2a05b18d44e2957b88f96ba460715e295bc1d7510468a2f3d3b44535d26c24"
|
||||
dependencies = [
|
||||
"regex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "password-hash"
|
||||
version = "0.5.0"
|
||||
@@ -6728,11 +6716,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "phf"
|
||||
version = "0.11.3"
|
||||
version = "0.12.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078"
|
||||
checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7"
|
||||
dependencies = [
|
||||
"phf_shared 0.11.3",
|
||||
"phf_shared 0.12.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6755,16 +6743,6 @@ dependencies = [
|
||||
"phf_shared 0.10.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "phf_codegen"
|
||||
version = "0.11.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a"
|
||||
dependencies = [
|
||||
"phf_generator 0.11.3",
|
||||
"phf_shared 0.11.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "phf_generator"
|
||||
version = "0.8.0"
|
||||
@@ -6836,6 +6814,15 @@ dependencies = [
|
||||
"siphasher 1.0.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "phf_shared"
|
||||
version = "0.12.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981"
|
||||
dependencies = [
|
||||
"siphasher 1.0.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pin-project"
|
||||
version = "1.1.10"
|
||||
@@ -6967,7 +6954,7 @@ dependencies = [
|
||||
"concurrent-queue",
|
||||
"hermit-abi",
|
||||
"pin-project-lite",
|
||||
"rustix 1.0.7",
|
||||
"rustix 1.0.8",
|
||||
"tracing",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
@@ -7647,13 +7634,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rfd"
|
||||
version = "0.15.3"
|
||||
version = "0.15.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "80c844748fdc82aae252ee4594a89b6e7ebef1063de7951545564cbc4e57075d"
|
||||
checksum = "ef2bee61e6cffa4635c72d7d81a84294e28f0930db0ddcb0f66d10244674ebed"
|
||||
dependencies = [
|
||||
"ashpd 0.11.0",
|
||||
"block2 0.6.1",
|
||||
"dispatch2 0.2.0",
|
||||
"dispatch2",
|
||||
"js-sys",
|
||||
"log",
|
||||
"objc2 0.6.1",
|
||||
@@ -8107,7 +8094,7 @@ dependencies = [
|
||||
"dirs",
|
||||
"hex",
|
||||
"keyring",
|
||||
"rfd 0.15.3",
|
||||
"rfd 0.15.4",
|
||||
"rust-embed",
|
||||
"rust-i18n",
|
||||
"serde",
|
||||
@@ -8443,15 +8430,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
version = "1.0.7"
|
||||
version = "1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266"
|
||||
checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys 0.9.4",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -8675,6 +8662,12 @@ dependencies = [
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "scoped-tls"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
|
||||
|
||||
[[package]]
|
||||
name = "scopeguard"
|
||||
version = "1.2.0"
|
||||
@@ -9698,7 +9691,7 @@ dependencies = [
|
||||
"fastrand",
|
||||
"getrandom 0.3.3",
|
||||
"once_cell",
|
||||
"rustix 1.0.7",
|
||||
"rustix 1.0.8",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
@@ -10079,7 +10072,7 @@ dependencies = [
|
||||
"serde_spanned",
|
||||
"toml_datetime",
|
||||
"toml_write",
|
||||
"winnow 0.7.11",
|
||||
"winnow 0.7.12",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -10769,6 +10762,66 @@ dependencies = [
|
||||
"web-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-backend"
|
||||
version = "0.3.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fe770181423e5fc79d3e2a7f4410b7799d5aab1de4372853de3c6aa13ca24121"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"downcast-rs",
|
||||
"rustix 0.38.44",
|
||||
"scoped-tls",
|
||||
"smallvec",
|
||||
"wayland-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-client"
|
||||
version = "0.31.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "978fa7c67b0847dbd6a9f350ca2569174974cd4082737054dbb7fbb79d7d9a61"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"rustix 0.38.44",
|
||||
"wayland-backend",
|
||||
"wayland-scanner",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-protocols"
|
||||
version = "0.32.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "779075454e1e9a521794fed15886323ea0feda3f8b0fc1390f5398141310422a"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"wayland-backend",
|
||||
"wayland-client",
|
||||
"wayland-scanner",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-scanner"
|
||||
version = "0.31.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "896fdafd5d28145fce7958917d69f2fd44469b1d4e861cb5961bcbeebc6d1484"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quick-xml 0.37.5",
|
||||
"quote",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-sys"
|
||||
version = "0.31.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dbcebb399c77d5aa9fa5db874806ee7b4eba4e73650948e8f93963f128896615"
|
||||
dependencies = [
|
||||
"dlib",
|
||||
"log",
|
||||
"pkg-config",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "web-sys"
|
||||
version = "0.3.77"
|
||||
@@ -11440,9 +11493,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "winnow"
|
||||
version = "0.7.11"
|
||||
version = "0.7.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74c7b26e3480b707944fc872477815d29a8e429d2f93a1ce000f5fa84a15cbcd"
|
||||
checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
@@ -11542,7 +11595,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "af3a19837351dc82ba89f8a125e22a3c475f05aba604acc023d62b2739ae2909"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"rustix 1.0.7",
|
||||
"rustix 1.0.8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -11641,9 +11694,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zbus"
|
||||
version = "5.8.0"
|
||||
version = "5.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "597f45e98bc7e6f0988276012797855613cd8269e23b5be62cc4e5d28b7e515d"
|
||||
checksum = "4bb4f9a464286d42851d18a605f7193b8febaf5b0919d71c6399b7b26e5b0aad"
|
||||
dependencies = [
|
||||
"async-broadcast",
|
||||
"async-recursion",
|
||||
@@ -11661,8 +11714,8 @@ dependencies = [
|
||||
"tracing",
|
||||
"uds_windows",
|
||||
"windows-sys 0.59.0",
|
||||
"winnow 0.7.11",
|
||||
"zbus_macros 5.8.0",
|
||||
"winnow 0.7.12",
|
||||
"zbus_macros 5.9.0",
|
||||
"zbus_names 4.2.0",
|
||||
"zvariant 5.6.0",
|
||||
]
|
||||
@@ -11682,9 +11735,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zbus_macros"
|
||||
version = "5.8.0"
|
||||
version = "5.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e5c8e4e14dcdd9d97a98b189cd1220f30e8394ad271e8c987da84f73693862c2"
|
||||
checksum = "ef9859f68ee0c4ee2e8cde84737c78e3f4c54f946f2a38645d0d4c7a95327659"
|
||||
dependencies = [
|
||||
"proc-macro-crate 3.3.0",
|
||||
"proc-macro2",
|
||||
@@ -11714,7 +11767,7 @@ checksum = "7be68e64bf6ce8db94f63e72f0c7eb9a60d733f7e0499e628dfab0f84d6bcb97"
|
||||
dependencies = [
|
||||
"serde",
|
||||
"static_assertions",
|
||||
"winnow 0.7.11",
|
||||
"winnow 0.7.12",
|
||||
"zvariant 5.6.0",
|
||||
]
|
||||
|
||||
@@ -11906,7 +11959,7 @@ dependencies = [
|
||||
"enumflags2",
|
||||
"serde",
|
||||
"url",
|
||||
"winnow 0.7.11",
|
||||
"winnow 0.7.12",
|
||||
"zvariant_derive 5.6.0",
|
||||
"zvariant_utils 3.2.0",
|
||||
]
|
||||
@@ -11959,5 +12012,5 @@ dependencies = [
|
||||
"serde",
|
||||
"static_assertions",
|
||||
"syn 2.0.104",
|
||||
"winnow 0.7.11",
|
||||
"winnow 0.7.12",
|
||||
]
|
||||
|
||||
@@ -108,8 +108,8 @@ cfg-if = "1.0.1"
|
||||
chacha20poly1305 = { version = "0.10.1" }
|
||||
chrono = { version = "0.4.41", features = ["serde"] }
|
||||
clap = { version = "4.5.41", features = ["derive", "env"] }
|
||||
const-str = { version = "0.6.2", features = ["std", "proc"] }
|
||||
crc32fast = "1.4.2"
|
||||
const-str = { version = "0.6.3", features = ["std", "proc"] }
|
||||
crc32fast = "1.5.0"
|
||||
criterion = { version = "0.5", features = ["html_reports"] }
|
||||
dashmap = "6.1.0"
|
||||
datafusion = "46.0.1"
|
||||
@@ -196,7 +196,7 @@ reqwest = { version = "0.12.22", default-features = false, features = [
|
||||
"json",
|
||||
"blocking",
|
||||
] }
|
||||
rfd = { version = "0.15.3", default-features = false, features = [
|
||||
rfd = { version = "0.15.4", default-features = false, features = [
|
||||
"xdg-portal",
|
||||
"tokio",
|
||||
] }
|
||||
|
||||
159 Dockerfile
@@ -1,132 +1,95 @@
|
||||
# Multi-stage build for RustFS production image
|
||||
FROM alpine:latest AS build
|
||||
|
||||
# Build arguments
|
||||
# Build stage: Download and extract RustFS binary
|
||||
FROM alpine:3.22 AS build
|
||||
|
||||
# Build arguments for platform and release
|
||||
ARG TARGETARCH
|
||||
ARG RELEASE=latest
|
||||
ARG CHANNEL=release
|
||||
|
||||
# Install dependencies for downloading and verifying binaries
|
||||
RUN apk add --no-cache \
|
||||
ca-certificates \
|
||||
curl \
|
||||
bash \
|
||||
wget \
|
||||
unzip \
|
||||
jq
|
||||
# Install minimal dependencies for downloading and extracting
|
||||
RUN apk add --no-cache ca-certificates curl unzip
|
||||
|
||||
# Create build directory
|
||||
WORKDIR /build
|
||||
|
||||
# Map TARGETARCH to architecture format used in builds
|
||||
# Detect architecture and download corresponding binary
|
||||
RUN case "${TARGETARCH}" in \
|
||||
"amd64") ARCH="x86_64" ;; \
|
||||
"arm64") ARCH="aarch64" ;; \
|
||||
*) echo "Unsupported architecture: ${TARGETARCH}" && exit 1 ;; \
|
||||
amd64) ARCH="x86_64" ;; \
|
||||
arm64) ARCH="aarch64" ;; \
|
||||
*) echo "Unsupported architecture: ${TARGETARCH}" >&2 && exit 1 ;; \
|
||||
esac && \
|
||||
echo "ARCH=${ARCH}" > /build/arch.env
|
||||
|
||||
# Download rustfs binary from dl.rustfs.com
|
||||
RUN . /build/arch.env && \
|
||||
BASE_URL="https://dl.rustfs.com/artifacts/rustfs" && \
|
||||
PLATFORM="linux" && \
|
||||
if [ "${RELEASE}" = "latest" ]; then \
|
||||
# Download latest version from specified channel \
|
||||
if [ "${CHANNEL}" = "dev" ]; then \
|
||||
PACKAGE_NAME="rustfs-${PLATFORM}-${ARCH}-dev-latest.zip"; \
|
||||
DOWNLOAD_URL="${BASE_URL}/dev/${PACKAGE_NAME}"; \
|
||||
echo "📥 Downloading latest dev build: ${PACKAGE_NAME}"; \
|
||||
else \
|
||||
PACKAGE_NAME="rustfs-${PLATFORM}-${ARCH}-latest.zip"; \
|
||||
DOWNLOAD_URL="${BASE_URL}/release/${PACKAGE_NAME}"; \
|
||||
echo "📥 Downloading latest release build: ${PACKAGE_NAME}"; \
|
||||
fi; \
|
||||
VERSION="latest"; \
|
||||
else \
|
||||
# Download specific version (always from release channel) \
|
||||
PACKAGE_NAME="rustfs-${PLATFORM}-${ARCH}-v${RELEASE}.zip"; \
|
||||
DOWNLOAD_URL="${BASE_URL}/release/${PACKAGE_NAME}"; \
|
||||
echo "📥 Downloading specific version: ${PACKAGE_NAME}"; \
|
||||
VERSION="v${RELEASE#v}"; \
|
||||
fi && \
|
||||
echo "🔗 Download URL: ${DOWNLOAD_URL}" && \
|
||||
curl -f -L "${DOWNLOAD_URL}" -o /build/rustfs.zip && \
|
||||
if [ ! -f /build/rustfs.zip ] || [ ! -s /build/rustfs.zip ]; then \
|
||||
echo "❌ Failed to download binary package"; \
|
||||
echo "💡 Make sure the package ${PACKAGE_NAME} exists"; \
|
||||
echo "🔗 Check: ${DOWNLOAD_URL}"; \
|
||||
exit 1; \
|
||||
fi && \
|
||||
unzip /build/rustfs.zip -d /build && \
|
||||
BASE_URL="https://dl.rustfs.com/artifacts/rustfs/release" && \
|
||||
PACKAGE_NAME="rustfs-linux-${ARCH}-${VERSION}.zip" && \
|
||||
DOWNLOAD_URL="${BASE_URL}/${PACKAGE_NAME}" && \
|
||||
echo "Downloading ${PACKAGE_NAME} from ${DOWNLOAD_URL}" >&2 && \
|
||||
curl -f -L "${DOWNLOAD_URL}" -o rustfs.zip && \
|
||||
unzip rustfs.zip -d /build && \
|
||||
chmod +x /build/rustfs && \
|
||||
rm /build/rustfs.zip && \
|
||||
echo "✅ Successfully downloaded and extracted rustfs binary"
|
||||
rm rustfs.zip || { echo "Failed to download or extract ${PACKAGE_NAME}" >&2; exit 1; }
|
||||
|
||||
# Runtime stage
|
||||
FROM alpine:latest
|
||||
# Runtime stage: Configure runtime environment
|
||||
FROM alpine:3.22.1
|
||||
|
||||
# Set build arguments and labels
|
||||
# Build arguments and labels
|
||||
ARG RELEASE=latest
|
||||
ARG CHANNEL=release
|
||||
ARG BUILD_DATE
|
||||
ARG VCS_REF
|
||||
|
||||
LABEL name="RustFS" \
|
||||
vendor="RustFS Team" \
|
||||
maintainer="RustFS Team <dev@rustfs.com>" \
|
||||
version="${RELEASE}" \
|
||||
release="${RELEASE}" \
|
||||
channel="${CHANNEL}" \
|
||||
build-date="${BUILD_DATE}" \
|
||||
vcs-ref="${VCS_REF}" \
|
||||
summary="RustFS is a high-performance distributed object storage system written in Rust, compatible with S3 API." \
|
||||
description="RustFS is a high-performance distributed object storage software built using Rust. It supports erasure coding storage, multi-tenant management, observability, and other enterprise-level features." \
|
||||
url="https://rustfs.com" \
|
||||
license="Apache-2.0"
|
||||
vendor="RustFS Team" \
|
||||
maintainer="RustFS Team <dev@rustfs.com>" \
|
||||
version="${RELEASE}" \
|
||||
release="${RELEASE}" \
|
||||
build-date="${BUILD_DATE}" \
|
||||
vcs-ref="${VCS_REF}" \
|
||||
summary="High-performance distributed object storage system compatible with S3 API" \
|
||||
description="RustFS is a distributed object storage system written in Rust, supporting erasure coding, multi-tenant management, and observability." \
|
||||
url="https://rustfs.com" \
|
||||
license="Apache-2.0"
|
||||
|
||||
# Install runtime dependencies
|
||||
RUN apk add --no-cache \
|
||||
ca-certificates \
|
||||
curl \
|
||||
tzdata \
|
||||
bash \
|
||||
&& addgroup -g 1000 rustfs \
|
||||
&& adduser -u 1000 -G rustfs -s /bin/sh -D rustfs
|
||||
RUN echo "https://dl-cdn.alpinelinux.org/alpine/v3.20/community" >> /etc/apk/repositories && \
|
||||
apk update && \
|
||||
apk add --no-cache ca-certificates bash gosu coreutils shadow && \
|
||||
addgroup -g 1000 rustfs && \
|
||||
adduser -u 1000 -G rustfs -s /bin/bash -D rustfs
|
||||
|
||||
# Environment variables
|
||||
ENV RUSTFS_ACCESS_KEY=rustfsadmin \
|
||||
# Copy CA certificates and RustFS binary from build stage
|
||||
COPY --from=build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
|
||||
COPY --from=build /build/rustfs /usr/bin/rustfs
|
||||
|
||||
# Copy entry point script
|
||||
COPY entrypoint.sh /entrypoint.sh
|
||||
|
||||
# Set permissions
|
||||
RUN chmod +x /usr/bin/rustfs /entrypoint.sh && \
|
||||
mkdir -p /data /logs && \
|
||||
chown rustfs:rustfs /data /logs && \
|
||||
chmod 700 /data /logs
|
||||
|
||||
# Environment variables (credentials should be set via environment or secrets)
|
||||
ENV RUSTFS_ADDRESS=:9000 \
|
||||
RUSTFS_ACCESS_KEY=rustfsadmin \
|
||||
RUSTFS_SECRET_KEY=rustfsadmin \
|
||||
RUSTFS_ADDRESS=":9000" \
|
||||
RUSTFS_CONSOLE_ENABLE=true \
|
||||
RUSTFS_VOLUMES=/data \
|
||||
RUST_LOG=warn
|
||||
|
||||
# Set permissions for /usr/bin (similar to MinIO's approach)
|
||||
RUN chmod -R 755 /usr/bin
|
||||
|
||||
# Copy CA certificates and binaries from build stage
|
||||
COPY --from=build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
|
||||
COPY --from=build /build/rustfs /usr/bin/
|
||||
|
||||
# Set executable permissions
|
||||
RUN chmod +x /usr/bin/rustfs
|
||||
|
||||
# Create data directory
|
||||
RUN mkdir -p /data /config && chown -R rustfs:rustfs /data /config
|
||||
|
||||
# Switch to non-root user
|
||||
USER rustfs
|
||||
|
||||
# Set working directory
|
||||
WORKDIR /data
|
||||
RUST_LOG=warn \
|
||||
RUSTFS_OBS_LOG_DIRECTORY=/logs \
|
||||
RUSTFS_SINKS_FILE_PATH=/logs
|
||||
|
||||
# Expose port
|
||||
EXPOSE 9000
|
||||
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=10s --retries=3 --start-period=40s \
|
||||
CMD curl -f http://localhost:9000/health || exit 1
|
||||
# Volumes for data and logs
|
||||
VOLUME ["/data", "/logs"]
|
||||
|
||||
# Volume for data
|
||||
VOLUME ["/data"]
|
||||
# Set entry point
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
||||
CMD ["/usr/bin/rustfs"]
|
||||
|
||||
# Set entrypoint
|
||||
ENTRYPOINT ["/usr/bin/rustfs"]
|
||||
|
||||
@@ -1,4 +1,13 @@
|
||||
# Multi-stage Dockerfile for RustFS
|
||||
# Multi-stage Dockerfile for RustFS - LOCAL DEVELOPMENT ONLY
|
||||
#
|
||||
# ⚠️ IMPORTANT: This Dockerfile is for local development and testing only.
|
||||
# ⚠️ It builds RustFS from source code and is NOT used in CI/CD pipelines.
|
||||
# ⚠️ CI/CD pipeline uses pre-built binaries from Dockerfile instead.
|
||||
#
|
||||
# Usage for local development:
|
||||
# docker build -f Dockerfile.source -t rustfs:dev-local .
|
||||
# docker run --rm -p 9000:9000 rustfs:dev-local
|
||||
#
|
||||
# Supports cross-compilation for amd64 and arm64 architectures
|
||||
ARG TARGETPLATFORM
|
||||
ARG BUILDPLATFORM
|
||||
@@ -6,6 +15,13 @@ ARG BUILDPLATFORM
|
||||
# Build stage
|
||||
FROM --platform=$BUILDPLATFORM rust:1.88-bookworm AS builder
|
||||
|
||||
# Re-declare build arguments after FROM (required for multi-stage builds)
|
||||
ARG TARGETPLATFORM
|
||||
ARG BUILDPLATFORM
|
||||
|
||||
# Debug: Print platform information
|
||||
RUN echo "🐳 Build Info: BUILDPLATFORM=$BUILDPLATFORM, TARGETPLATFORM=$TARGETPLATFORM"
|
||||
|
||||
# Install required build dependencies
|
||||
RUN apt-get update && apt-get install -y \
|
||||
wget \
|
||||
@@ -18,17 +34,7 @@ RUN apt-get update && apt-get install -y \
|
||||
lld \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install sccache for Rust compilation caching
|
||||
RUN wget https://github.com/mozilla/sccache/releases/download/v0.8.1/sccache-v0.8.1-x86_64-unknown-linux-gnu.tar.gz \
|
||||
&& tar -xzf sccache-v0.8.1-x86_64-unknown-linux-gnu.tar.gz \
|
||||
&& mv sccache-v0.8.1-x86_64-unknown-linux-gnu/sccache /usr/local/bin/ \
|
||||
&& chmod +x /usr/local/bin/sccache \
|
||||
&& rm -rf sccache-v0.8.1-x86_64-unknown-linux-gnu.tar.gz sccache-v0.8.1-x86_64-unknown-linux-gnu
|
||||
|
||||
# Set up sccache environment
|
||||
ENV RUSTC_WRAPPER=sccache \
|
||||
SCCACHE_DIR=/tmp/sccache \
|
||||
SCCACHE_CACHE_SIZE=2G
|
||||
# Note: sccache removed for simpler builds
|
||||
|
||||
# Install cross-compilation tools for ARM64
|
||||
RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
|
||||
@@ -49,10 +55,13 @@ RUN wget https://github.com/google/flatbuffers/releases/download/v25.2.10/Linux.
|
||||
&& mv flatc /usr/local/bin/ && chmod +x /usr/local/bin/flatc && rm -rf Linux.flatc.binary.g++-13.zip
|
||||
|
||||
# Set up Rust targets based on platform
|
||||
RUN case "$TARGETPLATFORM" in \
|
||||
RUN set -e && \
|
||||
PLATFORM="${TARGETPLATFORM:-linux/amd64}" && \
|
||||
echo "🎯 Setting up Rust target for platform: $PLATFORM" && \
|
||||
case "$PLATFORM" in \
|
||||
"linux/amd64") rustup target add x86_64-unknown-linux-gnu ;; \
|
||||
"linux/arm64") rustup target add aarch64-unknown-linux-gnu ;; \
|
||||
*) echo "Unsupported platform: $TARGETPLATFORM" && exit 1 ;; \
|
||||
*) echo "❌ Unsupported platform: $PLATFORM" && exit 1 ;; \
|
||||
esac
|
||||
|
||||
# Set up environment for cross-compilation
|
||||
@@ -62,17 +71,8 @@ ENV CXX_aarch64_unknown_linux_gnu=aarch64-linux-gnu-g++
|
||||
|
||||
WORKDIR /usr/src/rustfs
|
||||
|
||||
# Copy cargo configuration for optimized builds
|
||||
COPY cargo.config.toml ./.cargo/config.toml
|
||||
|
||||
# Copy Cargo files for dependency caching
|
||||
COPY Cargo.toml Cargo.lock ./
|
||||
COPY */Cargo.toml ./*/
|
||||
|
||||
# Create dummy main.rs files for dependency compilation
|
||||
RUN find . -name "Cargo.toml" -not -path "./Cargo.toml" | \
|
||||
xargs -I {} dirname {} | \
|
||||
xargs -I {} sh -c 'mkdir -p {}/src && echo "fn main() {}" > {}/src/main.rs'
|
||||
# Copy all source code
|
||||
COPY . .
|
||||
|
||||
# Configure cargo for optimized builds
|
||||
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true \
|
||||
@@ -82,32 +82,27 @@ ENV CARGO_NET_GIT_FETCH_WITH_CLI=true \
|
||||
CARGO_PROFILE_RELEASE_SPLIT_DEBUGINFO=off \
|
||||
CARGO_PROFILE_RELEASE_STRIP=symbols
|
||||
|
||||
# Build dependencies only (cache layer) with optimizations
|
||||
RUN sccache --start-server 2>/dev/null || true && \
|
||||
case "$TARGETPLATFORM" in \
|
||||
"linux/amd64") cargo build --release --target x86_64-unknown-linux-gnu -j $(nproc) ;; \
|
||||
"linux/arm64") cargo build --release --target aarch64-unknown-linux-gnu -j $(nproc) ;; \
|
||||
esac
|
||||
|
||||
# Copy source code
|
||||
COPY . .
|
||||
|
||||
# Generate protobuf code
|
||||
RUN cargo run --bin gproto
|
||||
|
||||
# Build the actual application with optimizations
|
||||
RUN sccache --start-server 2>/dev/null || true && \
|
||||
case "$TARGETPLATFORM" in \
|
||||
RUN case "$TARGETPLATFORM" in \
|
||||
"linux/amd64") \
|
||||
echo "🔨 Building for amd64..." && \
|
||||
rustup target add x86_64-unknown-linux-gnu && \
|
||||
cargo build --release --target x86_64-unknown-linux-gnu --bin rustfs -j $(nproc) && \
|
||||
cp target/x86_64-unknown-linux-gnu/release/rustfs /usr/local/bin/rustfs \
|
||||
;; \
|
||||
"linux/arm64") \
|
||||
echo "🔨 Building for arm64..." && \
|
||||
rustup target add aarch64-unknown-linux-gnu && \
|
||||
cargo build --release --target aarch64-unknown-linux-gnu --bin rustfs -j $(nproc) && \
|
||||
cp target/aarch64-unknown-linux-gnu/release/rustfs /usr/local/bin/rustfs \
|
||||
;; \
|
||||
esac && \
|
||||
sccache --show-stats || true
|
||||
*) \
|
||||
echo "❌ Unsupported platform: $TARGETPLATFORM" && exit 1 \
|
||||
;; \
|
||||
esac
|
||||
|
||||
# Runtime stage - Ubuntu minimal for better compatibility
|
||||
FROM ubuntu:22.04
|
||||
@@ -117,6 +112,8 @@ RUN apt-get update && apt-get install -y \
|
||||
ca-certificates \
|
||||
tzdata \
|
||||
wget \
|
||||
coreutils \
|
||||
passwd \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Create rustfs user and group
|
||||
@@ -133,6 +130,10 @@ RUN mkdir -p /data/rustfs{0,1,2,3} && \
|
||||
COPY --from=builder /usr/local/bin/rustfs /app/rustfs
|
||||
RUN chmod +x /app/rustfs && chown rustfs:rustfs /app/rustfs
|
||||
|
||||
# Copy entrypoint script
|
||||
COPY entrypoint.sh /entrypoint.sh
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
# Switch to non-root user
|
||||
USER rustfs
|
||||
|
||||
@@ -147,12 +148,9 @@ ENV RUSTFS_ACCESS_KEY=rustfsadmin \
|
||||
RUSTFS_VOLUMES=/data \
|
||||
RUST_LOG=warn
|
||||
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
||||
CMD wget --no-verbose --tries=1 --spider http://localhost:9000/health || exit 1
|
||||
|
||||
# Volume for data
|
||||
VOLUME ["/data"]
|
||||
|
||||
# Set default command
|
||||
# Set entrypoint and default command
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
||||
CMD ["/app/rustfs"]
|
||||
|
||||
192 Makefile
@@ -46,21 +46,6 @@ setup-hooks:
|
||||
chmod +x .git/hooks/pre-commit
|
||||
@echo "✅ Git hooks setup complete!"
|
||||
|
||||
.PHONY: init-devenv
|
||||
init-devenv:
|
||||
$(DOCKER_CLI) build -t $(IMAGE_NAME) -f Dockerfile.source .
|
||||
$(DOCKER_CLI) stop $(CONTAINER_NAME)
|
||||
$(DOCKER_CLI) rm $(CONTAINER_NAME)
|
||||
$(DOCKER_CLI) run -d --name $(CONTAINER_NAME) -p 9010:9010 -p 9000:9000 -v $(shell pwd):/root/s3-rustfs -it $(IMAGE_NAME)
|
||||
|
||||
.PHONY: start
|
||||
start:
|
||||
$(DOCKER_CLI) start $(CONTAINER_NAME)
|
||||
|
||||
.PHONY: stop
|
||||
stop:
|
||||
$(DOCKER_CLI) stop $(CONTAINER_NAME)
|
||||
|
||||
.PHONY: e2e-server
|
||||
e2e-server:
|
||||
sh $(shell pwd)/scripts/run.sh
|
||||
@@ -80,8 +65,6 @@ build-dev:
|
||||
@echo "🔨 Building RustFS in development mode..."
|
||||
./build-rustfs.sh --dev
|
||||
|
||||
|
||||
|
||||
# Docker-based build (alternative approach)
|
||||
# Usage: make BUILD_OS=ubuntu22.04 build-docker
|
||||
# Output: target/ubuntu22.04/release/rustfs
|
||||
@@ -98,13 +81,13 @@ build-docker:
|
||||
.PHONY: build-musl
|
||||
build-musl:
|
||||
@echo "🔨 Building rustfs for x86_64-unknown-linux-musl..."
|
||||
@echo "💡 On macOS/Windows, use 'make build-docker' or 'make docker-buildx' instead"
|
||||
@echo "💡 On macOS/Windows, use 'make build-docker' or 'make docker-dev' instead"
|
||||
./build-rustfs.sh --platform x86_64-unknown-linux-musl
|
||||
|
||||
.PHONY: build-gnu
|
||||
build-gnu:
|
||||
@echo "🔨 Building rustfs for x86_64-unknown-linux-gnu..."
|
||||
@echo "💡 On macOS/Windows, use 'make build-docker' or 'make docker-buildx' instead"
|
||||
@echo "💡 On macOS/Windows, use 'make build-docker' or 'make docker-dev' instead"
|
||||
./build-rustfs.sh --platform x86_64-unknown-linux-gnu
|
||||
|
||||
.PHONY: deploy-dev
|
||||
@@ -112,15 +95,19 @@ deploy-dev: build-musl
|
||||
@echo "🚀 Deploying to dev server: $${IP}"
|
||||
./scripts/dev_deploy.sh $${IP}
|
||||
|
||||
# Multi-architecture Docker build targets (NEW: using docker-buildx.sh)
|
||||
# ========================================================================================
|
||||
# Docker Multi-Architecture Builds (Primary Methods)
|
||||
# ========================================================================================
|
||||
|
||||
# Production builds using docker-buildx.sh (for CI/CD and production)
|
||||
.PHONY: docker-buildx
|
||||
docker-buildx:
|
||||
@echo "🏗️ Building multi-architecture Docker images with buildx..."
|
||||
@echo "🏗️ Building multi-architecture production Docker images with buildx..."
|
||||
./docker-buildx.sh
|
||||
|
||||
.PHONY: docker-buildx-push
|
||||
docker-buildx-push:
|
||||
@echo "🚀 Building and pushing multi-architecture Docker images with buildx..."
|
||||
@echo "🚀 Building and pushing multi-architecture production Docker images with buildx..."
|
||||
./docker-buildx.sh --push
|
||||
|
||||
.PHONY: docker-buildx-version
|
||||
@@ -129,7 +116,7 @@ docker-buildx-version:
|
||||
echo "❌ 错误: 请指定版本, 例如: make docker-buildx-version VERSION=v1.0.0"; \
|
||||
exit 1; \
|
||||
fi
|
||||
@echo "🏗️ Building multi-architecture Docker images (version: $(VERSION))..."
|
||||
@echo "🏗️ Building multi-architecture production Docker images (version: $(VERSION))..."
|
||||
./docker-buildx.sh --release $(VERSION)
|
||||
|
||||
.PHONY: docker-buildx-push-version
|
||||
@@ -138,21 +125,114 @@ docker-buildx-push-version:
|
||||
echo "❌ 错误: 请指定版本, 例如: make docker-buildx-push-version VERSION=v1.0.0"; \
|
||||
exit 1; \
|
||||
fi
|
||||
@echo "🚀 Building and pushing multi-architecture Docker images (version: $(VERSION))..."
|
||||
@echo "🚀 Building and pushing multi-architecture production Docker images (version: $(VERSION))..."
|
||||
./docker-buildx.sh --release $(VERSION) --push
|
||||
|
||||
# Development/Source builds using direct buildx commands
|
||||
.PHONY: docker-dev
|
||||
docker-dev:
|
||||
@echo "🏗️ Building multi-architecture development Docker images with buildx..."
|
||||
@echo "💡 This builds from source code and is intended for local development and testing"
|
||||
@echo "⚠️ Multi-arch images cannot be loaded locally, use docker-dev-push to push to registry"
|
||||
$(DOCKER_CLI) buildx build \
|
||||
--platform linux/amd64,linux/arm64 \
|
||||
--file $(DOCKERFILE_SOURCE) \
|
||||
--tag rustfs:source-latest \
|
||||
--tag rustfs:dev-latest \
|
||||
.
|
||||
|
||||
.PHONY: docker-dev-local
|
||||
docker-dev-local:
|
||||
@echo "🏗️ Building single-architecture development Docker image for local use..."
|
||||
@echo "💡 This builds from source code for the current platform and loads locally"
|
||||
$(DOCKER_CLI) buildx build \
|
||||
--file $(DOCKERFILE_SOURCE) \
|
||||
--tag rustfs:source-latest \
|
||||
--tag rustfs:dev-latest \
|
||||
--load \
|
||||
.
|
||||
|
||||
.PHONY: docker-dev-push
|
||||
docker-dev-push:
|
||||
@if [ -z "$(REGISTRY)" ]; then \
|
||||
echo "❌ 错误: 请指定镜像仓库, 例如: make docker-dev-push REGISTRY=ghcr.io/username"; \
|
||||
exit 1; \
|
||||
fi
|
||||
@echo "🚀 Building and pushing multi-architecture development Docker images..."
|
||||
@echo "💡 推送到仓库: $(REGISTRY)"
|
||||
$(DOCKER_CLI) buildx build \
|
||||
--platform linux/amd64,linux/arm64 \
|
||||
--file $(DOCKERFILE_SOURCE) \
|
||||
--tag $(REGISTRY)/rustfs:source-latest \
|
||||
--tag $(REGISTRY)/rustfs:dev-latest \
|
||||
--push \
|
||||
.
|
||||
|
||||
|
||||
|
||||
# Local production builds using direct buildx (alternative to docker-buildx.sh)
|
||||
.PHONY: docker-buildx-production-local
|
||||
docker-buildx-production-local:
|
||||
@echo "🏗️ Building single-architecture production Docker image locally..."
|
||||
@echo "💡 Alternative to docker-buildx.sh for local testing"
|
||||
$(DOCKER_CLI) buildx build \
|
||||
--file $(DOCKERFILE_PRODUCTION) \
|
||||
--tag rustfs:production-latest \
|
||||
--tag rustfs:latest \
|
||||
--load \
|
||||
--build-arg RELEASE=latest \
|
||||
.
|
||||
|
||||
# ========================================================================================
|
||||
# Single Architecture Docker Builds (Traditional)
|
||||
# ========================================================================================
|
||||
|
||||
.PHONY: docker-build-production
|
||||
docker-build-production:
|
||||
@echo "🏗️ Building production Docker image..."
|
||||
@echo "🏗️ Building single-architecture production Docker image..."
|
||||
@echo "💡 Consider using 'make docker-buildx-production-local' for multi-arch support"
|
||||
$(DOCKER_CLI) build -f $(DOCKERFILE_PRODUCTION) -t rustfs:latest .
|
||||
|
||||
.PHONY: docker-build-source
|
||||
docker-build-source:
|
||||
@echo "🏗️ Building source Docker image..."
|
||||
@echo "🏗️ Building single-architecture source Docker image..."
|
||||
@echo "💡 Consider using 'make docker-dev-local' for multi-arch support"
|
||||
$(DOCKER_CLI) build -f $(DOCKERFILE_SOURCE) -t rustfs:source .
|
||||
|
||||
# ========================================================================================
|
||||
# Development Environment
|
||||
# ========================================================================================
|
||||
|
||||
.PHONY: dev-env-start
|
||||
dev-env-start:
|
||||
@echo "🚀 Starting development environment..."
|
||||
$(DOCKER_CLI) buildx build \
|
||||
--file $(DOCKERFILE_SOURCE) \
|
||||
--tag rustfs:dev \
|
||||
--load \
|
||||
.
|
||||
$(DOCKER_CLI) stop $(CONTAINER_NAME) 2>/dev/null || true
|
||||
$(DOCKER_CLI) rm $(CONTAINER_NAME) 2>/dev/null || true
|
||||
$(DOCKER_CLI) run -d --name $(CONTAINER_NAME) \
|
||||
-p 9010:9010 -p 9000:9000 \
|
||||
-v $(shell pwd):/workspace \
|
||||
-it rustfs:dev
|
||||
|
||||
.PHONY: dev-env-stop
|
||||
dev-env-stop:
|
||||
@echo "🛑 Stopping development environment..."
|
||||
$(DOCKER_CLI) stop $(CONTAINER_NAME) 2>/dev/null || true
|
||||
$(DOCKER_CLI) rm $(CONTAINER_NAME) 2>/dev/null || true
|
||||
|
||||
.PHONY: dev-env-restart
|
||||
dev-env-restart: dev-env-stop dev-env-start
|
||||
|
||||
|
||||
|
||||
# ========================================================================================
|
||||
# Build Utilities
|
||||
# ========================================================================================
|
||||
|
||||
.PHONY: docker-inspect-multiarch
|
||||
docker-inspect-multiarch:
|
||||
@if [ -z "$(IMAGE)" ]; then \
|
||||
@@ -165,7 +245,7 @@ docker-inspect-multiarch:
|
||||
.PHONY: build-cross-all
|
||||
build-cross-all:
|
||||
@echo "🔧 Building all target architectures..."
|
||||
@echo "💡 On macOS/Windows, use 'make docker-buildx' for reliable multi-arch builds"
|
||||
@echo "💡 On macOS/Windows, use 'make docker-dev' for reliable multi-arch builds"
|
||||
@echo "🔨 Generating protobuf code..."
|
||||
cargo run --bin gproto || true
|
||||
@echo "🔨 Building x86_64-unknown-linux-musl..."
|
||||
@@ -174,6 +254,10 @@ build-cross-all:
|
||||
./build-rustfs.sh --platform aarch64-unknown-linux-gnu
|
||||
@echo "✅ All architectures built successfully!"
|
||||
|
||||
# ========================================================================================
|
||||
# Help and Documentation
|
||||
# ========================================================================================
|
||||
|
||||
.PHONY: help-build
|
||||
help-build:
|
||||
@echo "🔨 RustFS 构建帮助:"
|
||||
@@ -206,23 +290,61 @@ help-build:
|
||||
help-docker:
|
||||
@echo "🐳 Docker 多架构构建帮助:"
|
||||
@echo ""
|
||||
@echo "🚀 推荐使用 (新的 docker-buildx 方式):"
|
||||
@echo " make docker-buildx # 构建多架构镜像(不推送)"
|
||||
@echo " make docker-buildx-push # 构建并推送多架构镜像"
|
||||
@echo "🚀 生产镜像构建 (推荐使用 docker-buildx.sh):"
|
||||
@echo " make docker-buildx # 构建生产多架构镜像(不推送)"
|
||||
@echo " make docker-buildx-push # 构建并推送生产多架构镜像"
|
||||
@echo " make docker-buildx-version VERSION=v1.0.0 # 构建指定版本"
|
||||
@echo " make docker-buildx-push-version VERSION=v1.0.0 # 构建并推送指定版本"
|
||||
@echo ""
|
||||
@echo "🏗️ 单架构构建:"
|
||||
@echo " make docker-build-production # 构建生产环境镜像"
|
||||
@echo " make docker-build-source # 构建源码构建镜像"
|
||||
@echo "🔧 开发/源码镜像构建 (本地开发测试):"
|
||||
@echo " make docker-dev # 构建开发多架构镜像(无法本地加载)"
|
||||
@echo " make docker-dev-local # 构建开发单架构镜像(本地加载)"
|
||||
@echo " make docker-dev-push REGISTRY=xxx # 构建并推送开发镜像"
|
||||
@echo ""
|
||||
@echo "🏗️ 本地生产镜像构建 (替代方案):"
|
||||
@echo " make docker-buildx-production-local # 本地构建生产单架构镜像"
|
||||
@echo ""
|
||||
@echo "📦 单架构构建 (传统方式):"
|
||||
@echo " make docker-build-production # 构建单架构生产镜像"
|
||||
@echo " make docker-build-source # 构建单架构源码镜像"
|
||||
@echo ""
|
||||
@echo "🚀 开发环境管理:"
|
||||
@echo " make dev-env-start # 启动开发容器环境"
|
||||
@echo " make dev-env-stop # 停止开发容器环境"
|
||||
@echo " make dev-env-restart # 重启开发容器环境"
|
||||
@echo ""
|
||||
@echo "🔧 辅助工具:"
|
||||
@echo " make build-cross-all # 构建所有架构的二进制文件"
|
||||
@echo " make docker-inspect-multiarch IMAGE=xxx # 检查镜像的架构支持"
|
||||
@echo ""
|
||||
@echo "📋 环境变量 (在推送时需要设置):"
|
||||
@echo "📋 环境变量:"
|
||||
@echo " REGISTRY 镜像仓库地址 (推送时需要)"
|
||||
@echo " DOCKERHUB_USERNAME Docker Hub 用户名"
|
||||
@echo " DOCKERHUB_TOKEN Docker Hub 访问令牌"
|
||||
@echo " GITHUB_TOKEN GitHub 访问令牌"
|
||||
@echo ""
|
||||
@echo "💡 更多详情请参考项目根目录的 docker-buildx.sh 脚本"
|
||||
@echo "💡 建议:"
|
||||
@echo " - 生产用途: 使用 docker-buildx* 命令 (基于预编译二进制)"
|
||||
@echo " - 本地开发: 使用 docker-dev* 命令 (从源码构建)"
|
||||
@echo " - 开发环境: 使用 dev-env-* 命令管理开发容器"
|
||||
|
||||
.PHONY: help
|
||||
help:
|
||||
@echo "🦀 RustFS Makefile 帮助:"
|
||||
@echo ""
|
||||
@echo "📋 主要命令分类:"
|
||||
@echo " make help-build # 显示构建相关帮助"
|
||||
@echo " make help-docker # 显示 Docker 相关帮助"
|
||||
@echo ""
|
||||
@echo "🔧 代码质量:"
|
||||
@echo " make fmt # 格式化代码"
|
||||
@echo " make clippy # 运行 clippy 检查"
|
||||
@echo " make test # 运行测试"
|
||||
@echo " make pre-commit # 运行所有预提交检查"
|
||||
@echo ""
|
||||
@echo "🚀 快速开始:"
|
||||
@echo " make build # 构建 RustFS 二进制"
|
||||
@echo " make docker-dev-local # 构建开发 Docker 镜像(本地)"
|
||||
@echo " make dev-env-start # 启动开发环境"
|
||||
@echo ""
|
||||
@echo "💡 更多帮助请使用 'make help-build' 或 'make help-docker'"
|
||||
|
||||
@@ -108,14 +108,26 @@ pub const DEFAULT_CONSOLE_ADDRESS: &str = concat!(":", DEFAULT_CONSOLE_PORT);
/// It is used to store the logs of the application.
/// Default value: rustfs.log
/// Environment variable: RUSTFS_OBSERVABILITY_LOG_FILENAME
pub const DEFAULT_LOG_FILENAME: &str = "rustfs.log";
pub const DEFAULT_LOG_FILENAME: &str = "rustfs";

/// Default OBS log filename for rustfs
/// This is the default log filename for OBS.
/// It is used to store the logs of the application.
/// Default value: rustfs.log
pub const DEFAULT_OBS_LOG_FILENAME: &str = concat!(DEFAULT_LOG_FILENAME, ".log");

/// Default sink file log file for rustfs
/// This is the default sink file log file for rustfs.
/// It is used to store the logs of the application.
/// Default value: rustfs-sink.log
pub const DEFAULT_SINK_FILE_LOG_FILE: &str = concat!(DEFAULT_LOG_FILENAME, "-sink.log");

/// Default log directory for rustfs
/// This is the default log directory for rustfs.
/// It is used to store the logs of the application.
/// Default value: logs
/// Environment variable: RUSTFS_OBSERVABILITY_LOG_DIRECTORY
pub const DEFAULT_LOG_DIR: &str = "deploy/logs";
pub const DEFAULT_LOG_DIR: &str = "/logs";

/// Default log rotation size mb for rustfs
/// This is the default log rotation size for rustfs.
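The hunk above renames `DEFAULT_LOG_FILENAME` to the bare stem `"rustfs"` and derives the concrete file names from it. A minimal sketch of the resolved values (illustrative only, not the crate's actual code):

```rust
// Illustrative sketch of what the derived log file names resolve to.
fn main() {
    let log_filename = "rustfs";                         // new DEFAULT_LOG_FILENAME
    let obs_log = format!("{log_filename}.log");         // DEFAULT_OBS_LOG_FILENAME  -> "rustfs.log"
    let sink_log = format!("{log_filename}-sink.log");   // DEFAULT_SINK_FILE_LOG_FILE -> "rustfs-sink.log"
    assert_eq!(obs_log, "rustfs.log");
    assert_eq!(sink_log, "rustfs-sink.log");
    println!("{obs_log} {sink_log}");
}
```

The OBS log keeps the previous `rustfs.log` name, while the file sink now gets its own `rustfs-sink.log`, both rooted in the new `/logs` default directory.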
@@ -13,6 +13,7 @@
// limitations under the License.

use crate::error::{Error, Result, is_err_config_not_found};
use crate::sys::get_claims_from_token_with_secret;
use crate::{
cache::{Cache, CacheEntity},
error::{Error as IamError, is_err_no_such_group, is_err_no_such_policy, is_err_no_such_user},
@@ -26,7 +27,7 @@ use rustfs_ecstore::global::get_global_action_cred;
use rustfs_madmin::{AccountStatus, AddOrUpdateUserReq, GroupDesc};
use rustfs_policy::{
arn::ARN,
auth::{self, Credentials, UserIdentity, get_claims_from_token_with_secret, is_secret_key_valid, jwt_sign},
auth::{self, Credentials, UserIdentity, is_secret_key_valid, jwt_sign},
format::Format,
policy::{
EMBEDDED_POLICY_TYPE, INHERITED_POLICY_TYPE, Policy, PolicyDoc, default::DEFAULT_POLICIES, iam_policy_claim_name_sa,

@@ -23,6 +23,7 @@ use crate::store::GroupInfo;
use crate::store::MappedPolicy;
use crate::store::Store;
use crate::store::UserType;
use crate::utils::extract_claims;
use rustfs_ecstore::global::get_global_action_cred;
use rustfs_madmin::AddOrUpdateUserReq;
use rustfs_madmin::GroupDesc;
@@ -542,7 +543,7 @@ impl<T: Store> IamSys<T> {
}
};

if policies.is_empty() {
if !is_owner && policies.is_empty() {
return false;
}

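The guard change above means an owner (root) credential is no longer rejected early just because no explicit policies are mapped to it; regular users with an empty policy list are still denied. A small sketch of that decision, with simplified names standing in for the surrounding `IamSys` flow:

```rust
// Minimal sketch of the changed guard (simplified; not the full IsAllowed flow).
fn passes_policy_guard(is_owner: bool, policies: &[String]) -> bool {
    // Previously an empty policy list always short-circuited to "deny";
    // now the owner credential is exempt from that early rejection.
    if !is_owner && policies.is_empty() {
        return false;
    }
    true
}

fn main() {
    assert!(!passes_policy_guard(false, &[]));                      // regular user, no policies -> denied
    assert!(passes_policy_guard(true, &[]));                        // owner, no policies -> continues
    assert!(passes_policy_guard(false, &["readwrite".to_string()])); // user with a policy -> continues
}
```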
@@ -732,3 +733,18 @@ pub struct UpdateServiceAccountOpts {
pub expiration: Option<OffsetDateTime>,
pub status: Option<String>,
}

pub fn get_claims_from_token_with_secret(token: &str, secret: &str) -> Result<HashMap<String, Value>> {
    let mut ms =
        extract_claims::<HashMap<String, Value>>(token, secret).map_err(|e| Error::other(format!("extract claims err {e}")))?;

    if let Some(session_policy) = ms.claims.get(SESSION_POLICY_NAME) {
        let policy_str = session_policy.as_str().unwrap_or_default();
        let policy = base64_decode(policy_str.as_bytes()).map_err(|e| Error::other(format!("base64 decode err {e}")))?;
        ms.claims.insert(
            SESSION_POLICY_NAME_EXTRACTED.to_string(),
            Value::String(String::from_utf8(policy).map_err(|e| Error::other(format!("utf8 decode err {e}")))?),
        );
    }
    Ok(ms.claims)
}

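The relocated function above now also unpacks the base64-encoded session policy embedded in the token claims (the old `rustfs_policy::auth` version left this as a TODO). A self-contained sketch of just that post-processing step, assuming the `base64` and `serde_json` crates and placeholder claim-key names; the real code goes through `extract_claims()` and the project's own `base64_decode` helper:

```rust
// Sketch of the session-policy extraction step on a plain claims map.
use base64::Engine;
use serde_json::Value;
use std::collections::HashMap;

const SESSION_POLICY_NAME: &str = "sessionPolicy";                      // assumed key name
const SESSION_POLICY_NAME_EXTRACTED: &str = "sessionPolicy-extracted";  // assumed key name

fn extract_session_policy(claims: &mut HashMap<String, Value>) -> Result<(), String> {
    if let Some(session_policy) = claims.get(SESSION_POLICY_NAME) {
        let policy_str = session_policy.as_str().unwrap_or_default();
        let decoded = base64::engine::general_purpose::STANDARD
            .decode(policy_str)
            .map_err(|e| format!("base64 decode err {e}"))?;
        let policy = String::from_utf8(decoded).map_err(|e| format!("utf8 decode err {e}"))?;
        // Store the readable policy document next to the raw claim.
        claims.insert(SESSION_POLICY_NAME_EXTRACTED.to_string(), Value::String(policy));
    }
    Ok(())
}

fn main() {
    let mut claims = HashMap::new();
    let encoded = base64::engine::general_purpose::STANDARD.encode(r#"{"Version":"2012-10-17"}"#);
    claims.insert(SESSION_POLICY_NAME.to_string(), Value::String(encoded));
    extract_session_policy(&mut claims).unwrap();
    assert!(claims.contains_key(SESSION_POLICY_NAME_EXTRACTED));
}
```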
@@ -14,7 +14,8 @@
|
||||
|
||||
use rustfs_config::{
|
||||
APP_NAME, DEFAULT_LOG_DIR, DEFAULT_LOG_FILENAME, DEFAULT_LOG_KEEP_FILES, DEFAULT_LOG_LEVEL, DEFAULT_LOG_ROTATION_SIZE_MB,
|
||||
DEFAULT_LOG_ROTATION_TIME, ENVIRONMENT, METER_INTERVAL, SAMPLE_RATIO, SERVICE_VERSION, USE_STDOUT,
|
||||
DEFAULT_LOG_ROTATION_TIME, DEFAULT_OBS_LOG_FILENAME, DEFAULT_SINK_FILE_LOG_FILE, ENVIRONMENT, METER_INTERVAL, SAMPLE_RATIO,
|
||||
SERVICE_VERSION, USE_STDOUT,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::env;
|
||||
@@ -104,7 +105,7 @@ impl OtelConfig {
|
||||
log_filename: env::var("RUSTFS_OBS_LOG_FILENAME")
|
||||
.ok()
|
||||
.and_then(|v| v.parse().ok())
|
||||
.or(Some(DEFAULT_LOG_FILENAME.to_string())),
|
||||
.or(Some(DEFAULT_OBS_LOG_FILENAME.to_string())),
|
||||
log_rotation_size_mb: env::var("RUSTFS_OBS_LOG_ROTATION_SIZE_MB")
|
||||
.ok()
|
||||
.and_then(|v| v.parse().ok())
|
||||
@@ -210,16 +211,15 @@ pub struct FileSinkConfig {
|
||||
|
||||
impl FileSinkConfig {
|
||||
pub fn get_default_log_path() -> String {
|
||||
let temp_dir = env::temp_dir().join("rustfs");
|
||||
|
||||
let temp_dir = env::temp_dir().join(DEFAULT_LOG_FILENAME);
|
||||
if let Err(e) = std::fs::create_dir_all(&temp_dir) {
|
||||
eprintln!("Failed to create log directory: {e}");
|
||||
return "rustfs/rustfs.log".to_string();
|
||||
return DEFAULT_LOG_DIR.to_string();
|
||||
}
|
||||
temp_dir
|
||||
.join("rustfs.log")
|
||||
.join(DEFAULT_SINK_FILE_LOG_FILE)
|
||||
.to_str()
|
||||
.unwrap_or("rustfs/rustfs.log")
|
||||
.unwrap_or(DEFAULT_LOG_DIR)
|
||||
.to_string()
|
||||
}
|
||||
pub fn new() -> Self {
|
||||
|
||||
@@ -77,8 +77,7 @@ impl Logger {
|
||||
}
|
||||
|
||||
/// Asynchronous logging of unified log entries
|
||||
#[tracing::instrument(skip(self), fields(log_source = "logger"))]
|
||||
#[tracing::instrument(level = "error", skip_all)]
|
||||
#[tracing::instrument(skip_all, fields(log_source = "logger"))]
|
||||
pub async fn log_entry(&self, entry: UnifiedLogEntry) -> Result<(), GlobalError> {
|
||||
// Extract information for tracing based on entry type
|
||||
match &entry {
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
|
||||
use crate::{AppConfig, SinkConfig, UnifiedLogEntry};
|
||||
use async_trait::async_trait;
|
||||
use rustfs_config::DEFAULT_SINK_FILE_LOG_FILE;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[cfg(feature = "file")]
|
||||
@@ -71,7 +72,7 @@ pub async fn create_sinks(config: &AppConfig) -> Vec<Arc<dyn Sink>> {
|
||||
SinkConfig::File(file_config) => {
|
||||
tracing::debug!("FileSink: Using path: {}", file_config.path);
|
||||
match file::FileSink::new(
|
||||
file_config.path.clone(),
|
||||
format!("{}/{}", file_config.path.clone(), DEFAULT_SINK_FILE_LOG_FILE),
|
||||
file_config.buffer_size.unwrap_or(8192),
|
||||
file_config.flush_interval_ms.unwrap_or(1000),
|
||||
file_config.flush_threshold.unwrap_or(100),
|
||||
|
||||
@@ -16,8 +16,6 @@ use crate::error::Error as IamError;
|
||||
use crate::error::{Error, Result};
|
||||
use crate::policy::{INHERITED_POLICY_TYPE, Policy, Validator, iam_policy_claim_name_sa};
|
||||
use crate::utils;
|
||||
use crate::utils::extract_claims;
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::{Value, json};
|
||||
use std::collections::HashMap;
|
||||
@@ -253,12 +251,6 @@ pub fn create_new_credentials_with_metadata(
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_claims_from_token_with_secret<T: DeserializeOwned>(token: &str, secret: &str) -> Result<T> {
|
||||
let ms = extract_claims::<T>(token, secret)?;
|
||||
// TODO SessionPolicyName
|
||||
Ok(ms.claims)
|
||||
}
|
||||
|
||||
pub fn jwt_sign<T: Serialize>(claims: &T, token_secret: &str) -> Result<String> {
|
||||
let token = utils::generate_jwt(claims, token_secret)?;
|
||||
Ok(token)
|
||||
|
||||
@@ -39,14 +39,21 @@ pub async fn node_service_time_out_client(
Box<dyn Error>,
> {
let token: MetadataValue<_> = "rustfs rpc".parse()?;
let channel = match GLOBAL_Conn_Map.read().await.get(addr) {
Some(channel) => channel.clone(),

let channel = { GLOBAL_Conn_Map.read().await.get(addr).cloned() };

let channel = match channel {
Some(channel) => channel,
None => {
let connector = Endpoint::from_shared(addr.to_string())?.connect_timeout(Duration::from_secs(60));
connector.connect().await?
let channel = connector.connect().await?;

{
GLOBAL_Conn_Map.write().await.insert(addr.to_string(), channel.clone());
}
channel
}
};
GLOBAL_Conn_Map.write().await.insert(addr.to_string(), channel.clone());

// let timeout_channel = Timeout::new(channel, Duration::from_secs(60));
Ok(NodeServiceClient::with_interceptor(
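The reworked client above pulls the cache lookup out of the `match`, so the read guard on `GLOBAL_Conn_Map` is dropped before the potentially slow connect, and it only takes a short-lived write lock to cache the newly created channel. A minimal, self-contained sketch of the same read-then-connect-then-insert pattern, using a toy channel type (the real code caches tonic `Channel`s built from `Endpoint`):

```rust
// Minimal sketch of the connection-cache pattern; assumes tokio with the "sync" and "rt" features.
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;

#[derive(Clone, Debug)]
struct FakeChannel(String);

async fn connect(addr: &str) -> FakeChannel {
    // Stand-in for Endpoint::from_shared(addr)?.connect().await?
    FakeChannel(addr.to_string())
}

async fn get_or_connect(map: &Arc<RwLock<HashMap<String, FakeChannel>>>, addr: &str) -> FakeChannel {
    // 1. Look up under a read lock and drop the guard immediately,
    //    so no lock is held across the await on connect().
    let cached = { map.read().await.get(addr).cloned() };
    match cached {
        Some(ch) => ch,
        None => {
            let ch = connect(addr).await;
            // 2. Insert under a short-lived write lock, then hand back the clone.
            map.write().await.insert(addr.to_string(), ch.clone());
            ch
        }
    }
}

#[tokio::main]
async fn main() {
    let map = Arc::new(RwLock::new(HashMap::new()));
    let a = get_or_connect(&map, "http://node1:9000").await;
    let b = get_or_connect(&map, "http://node1:9000").await; // served from the cache
    println!("{a:?} {b:?}");
}
```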
@@ -248,6 +248,8 @@ done
|
||||
# Main execution
|
||||
main() {
|
||||
print_message $BLUE "🐳 RustFS Docker Buildx Build Script"
|
||||
print_message $YELLOW "📋 Build Strategy: Uses pre-built binaries from dl.rustfs.com"
|
||||
print_message $YELLOW "🚀 Production images only - optimized for distribution"
|
||||
echo ""
|
||||
|
||||
# Check prerequisites
|
||||
|
||||
104 entrypoint.sh (new file)
@@ -0,0 +1,104 @@
#!/bin/bash
set -e

APP_USER=rustfs
APP_GROUP=rustfs
APP_UID=${PUID:-1000}
APP_GID=${PGID:-1000}

# Parse RUSTFS_VOLUMES into array (support space, comma, tab as separator)
VOLUME_RAW="${RUSTFS_VOLUMES:-/data}"
# Replace comma and tab with space, then split
VOLUME_RAW=$(echo "$VOLUME_RAW" | tr ',\t' ' ')
read -ra ALL_VOLUMES <<< "$VOLUME_RAW"

# Only keep local volumes (start with /, not http/https)
LOCAL_VOLUMES=()
for vol in "${ALL_VOLUMES[@]}"; do
    if [[ "$vol" =~ ^/ ]] && [[ ! "$vol" =~ ^https?:// ]]; then
        # Not a URL (http/https), just a local path
        LOCAL_VOLUMES+=("$vol")
    fi
    # If it's a URL (http/https), skip
    # If it's an empty string, skip
    # If it's a local path, keep
    # (We don't support other protocols here)
done

# Always ensure /logs is included for permission fix
include_logs=1
for vol in "${LOCAL_VOLUMES[@]}"; do
    if [ "$vol" = "/logs" ]; then
        include_logs=0
        break
    fi
done
if [ $include_logs -eq 1 ]; then
    LOCAL_VOLUMES+=("/logs")
fi

# Try to update rustfs UID/GID if needed (requires root and shadow tools)
update_user_group_ids() {
    local uid="$1"
    local gid="$2"
    local user="$3"
    local group="$4"
    local updated=0
    if [ "$(id -u "$user")" != "$uid" ]; then
        if command -v usermod >/dev/null 2>&1; then
            echo "🔧 Updating UID of $user to $uid"
            usermod -u "$uid" "$user"
            updated=1
        fi
    fi
    if [ "$(id -g "$group")" != "$gid" ]; then
        if command -v groupmod >/dev/null 2>&1; then
            echo "🔧 Updating GID of $group to $gid"
            groupmod -g "$gid" "$group"
            updated=1
        fi
    fi
    return $updated
}

echo "📦 Initializing mount directories: ${LOCAL_VOLUMES[*]}"

for vol in "${LOCAL_VOLUMES[@]}"; do
    if [ ! -d "$vol" ]; then
        echo "📁 Creating directory: $vol"
        mkdir -p "$vol"
    fi

    # Alpine busybox stat does not support -c, coreutils is required
    dir_uid=$(stat -c '%u' "$vol")
    dir_gid=$(stat -c '%g' "$vol")

    if [ "$dir_uid" != "$APP_UID" ] || [ "$dir_gid" != "$APP_GID" ]; then
        if [[ "$SKIP_CHOWN" != "true" ]]; then
            # Prefer to update rustfs user/group UID/GID
            update_user_group_ids "$dir_uid" "$dir_gid" "$APP_USER" "$APP_GROUP" || \
            {
                echo "🔧 Fixing ownership for: $vol → $APP_USER:$APP_GROUP"
                if [[ -n "$CHOWN_RECURSION_DEPTH" ]]; then
                    echo "🔧 Applying ownership fix with recursion depth: $CHOWN_RECURSION_DEPTH"
                    find "$vol" -mindepth 0 -maxdepth "$CHOWN_RECURSION_DEPTH" -exec chown "$APP_USER:$APP_GROUP" {} \;
                else
                    echo "🔧 Applying ownership fix recursively (full depth)"
                    chown -R "$APP_USER:$APP_GROUP" "$vol"
                fi
            }
        else
            echo "⚠️ SKIP_CHOWN is enabled. Skipping ownership fix for: $vol"
        fi
    fi
    chmod 700 "$vol"
done

# Warn if default credentials are used
if [[ "$RUSTFS_ACCESS_KEY" == "rustfsadmin" || "$RUSTFS_SECRET_KEY" == "rustfsadmin" ]]; then
    echo "⚠️ WARNING: Using default RUSTFS_ACCESS_KEY or RUSTFS_SECRET_KEY"
    echo "⚠️ It is strongly recommended to override these values in production!"
fi

echo "🚀 Starting application: $*"
exec gosu "$APP_USER" "$@"
@@ -17,8 +17,8 @@ use http::Uri;
|
||||
use rustfs_ecstore::global::get_global_action_cred;
|
||||
use rustfs_iam::error::Error as IamError;
|
||||
use rustfs_iam::sys::SESSION_POLICY_NAME;
|
||||
use rustfs_iam::sys::get_claims_from_token_with_secret;
|
||||
use rustfs_policy::auth;
|
||||
use rustfs_policy::auth::get_claims_from_token_with_secret;
|
||||
use s3s::S3Error;
|
||||
use s3s::S3ErrorCode;
|
||||
use s3s::S3Result;
|
||||
|
||||
@@ -199,7 +199,7 @@ async fn run(opt: config::Opt) -> Result<()> {
                if result.update_available {
                    if let Some(latest) = &result.latest_version {
                        info!(
                            "🚀 New version available: {} -> {} (current: {})",
                            "🚀 Version check: New version available: {} -> {} (current: {})",
                            result.current_version, latest.version, result.current_version
                        );
                        if let Some(notes) = &latest.release_notes {
@@ -210,14 +210,14 @@ async fn run(opt: config::Opt) -> Result<()> {
                        }
                    }
                } else {
                    debug!("✅ Current version is up to date: {}", result.current_version);
                    debug!("✅ Version check: Current version is up to date: {}", result.current_version);
                }
            }
            Err(UpdateCheckError::HttpError(e)) => {
                debug!("Version check network error (this is normal): {}", e);
                debug!("Version check: network error (this is normal): {}", e);
            }
            Err(e) => {
                debug!("Version check failed (this is normal): {}", e);
                debug!("Version check: failed (this is normal): {}", e);
            }
        }
    });

@@ -792,11 +792,6 @@ impl S3 for FS {
        };
        let last_modified = info.mod_time.map(Timestamp::from);

        let body = Some(StreamingBlob::wrap(bytes_stream(
            ReaderStream::with_capacity(reader.stream, DEFAULT_READ_BUFFER_SIZE),
            info.size as usize,
        )));

        let mut rs = rs;

        if let Some(part_number) = part_number {
@@ -805,17 +800,25 @@ impl S3 for FS {
            }
        }

        let mut content_length = info.size as i64;

        let content_range = if let Some(rs) = rs {
            let total_size = info.get_actual_size().map_err(ApiError::from)?;
            let (start, length) = rs.get_offset_length(total_size as i64).map_err(ApiError::from)?;
            content_length = length;
            Some(format!("bytes {}-{}/{}", start, start as i64 + length - 1, total_size))
        } else {
            None
        };

        let body = Some(StreamingBlob::wrap(bytes_stream(
            ReaderStream::with_capacity(reader.stream, DEFAULT_READ_BUFFER_SIZE),
            content_length as usize,
        )));

        let output = GetObjectOutput {
            body,
            content_length: Some(info.size as i64),
            content_length: Some(content_length),
            last_modified,
            content_type,
            accept_ranges: Some("bytes".to_string()),

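The hunk above makes ranged GETs report a Content-Length and Content-Range that match the requested byte range rather than the full object size. A rough way to observe the effect, assuming a locally running instance on :9000 and an object that is readable without request signing (both assumptions, not part of this change):

```bash
# Ask for the second 100 bytes and dump only the response headers;
# the server should answer 206 with "Content-Range: bytes 100-199/<total>".
curl -sS -o /dev/null -D - \
  -H "Range: bytes=100-199" \
  http://localhost:9000/test-bucket/hello.txt
```
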
@@ -15,7 +15,7 @@
use serde::{Deserialize, Serialize};
use std::time::Duration;
use thiserror::Error;
use tracing::{debug, error, info, warn};
use tracing::{debug, error, info};

use crate::version;

@@ -86,7 +86,7 @@ impl VersionChecker {

        Self {
            client,
            version_url: "https://version.rustfs.com".to_string(),
            version_url: "https://version.rustfs.com/latest.json".to_string(),
            timeout: Duration::from_secs(10),
        }
    }
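
The checker now points at a concrete manifest path instead of the bare host. To see what it fetches, the endpoint can be queried directly; this is only an illustrative command, and the response layout is whatever `VersionInfo` deserializes, which is not shown in this diff:

```bash
# Fetch the version manifest used by VersionChecker.
curl -fsSL https://version.rustfs.com/latest.json
```
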
@@ -139,8 +139,9 @@ impl VersionChecker {

        debug!("Retrieved latest version information: {:?}", version_info);

        // Compare versions
        let update_available = self.is_newer_version(&current_version, &version_info.version)?;
        // Compare versions using version.rs functions
        let update_available = version::is_newer_version(&current_version, &version_info.version)
            .map_err(|e| UpdateCheckError::VersionParseError(e.to_string()))?;

        let result = UpdateCheckResult {
            update_available,
@@ -161,74 +162,6 @@ impl VersionChecker {

        Ok(result)
    }

    /// Compare version numbers to determine if there's an update
    fn is_newer_version(&self, current: &str, latest: &str) -> Result<bool, UpdateCheckError> {
        // Clean version numbers, remove prefixes like "v", "RELEASE.", etc.
        let current_clean = self.clean_version(current);
        let latest_clean = self.clean_version(latest);

        debug!("Version comparison: current='{}' vs latest='{}'", current_clean, latest_clean);

        // If versions are the same, no update is needed
        if current_clean == latest_clean {
            return Ok(false);
        }

        // Try semantic version comparison
        match self.compare_semantic_versions(&current_clean, &latest_clean) {
            Ok(is_newer) => Ok(is_newer),
            Err(_) => {
                // If semantic version comparison fails, use string comparison
                warn!("Semantic version comparison failed, using string comparison");
                Ok(latest_clean > current_clean)
            }
        }
    }

    /// Clean version string
    fn clean_version(&self, version: &str) -> String {
        version
            .trim()
            .trim_start_matches('v')
            .trim_start_matches("RELEASE.")
            .trim_start_matches('@')
            .to_string()
    }

    /// Semantic version comparison
    fn compare_semantic_versions(&self, current: &str, latest: &str) -> Result<bool, UpdateCheckError> {
        let current_parts = self.parse_version_parts(current)?;
        let latest_parts = self.parse_version_parts(latest)?;

        // Use tuple comparison for lexicographic ordering
        Ok(latest_parts > current_parts)
    }

    /// Parse version parts (major, minor, patch)
    fn parse_version_parts(&self, version: &str) -> Result<(u32, u32, u32), UpdateCheckError> {
        let parts: Vec<&str> = version.split('.').collect();

        if parts.len() < 3 {
            return Err(UpdateCheckError::VersionParseError(format!("Invalid version format: {version}")));
        }

        let major = parts[0]
            .parse::<u32>()
            .map_err(|_| UpdateCheckError::VersionParseError(format!("Cannot parse major version: {}", parts[0])))?;

        let minor = parts[1]
            .parse::<u32>()
            .map_err(|_| UpdateCheckError::VersionParseError(format!("Cannot parse minor version: {}", parts[1])))?;

        // Patch version may contain other characters, only take numeric part
        let patch_str = parts[2].chars().take_while(|c| c.is_numeric()).collect::<String>();
        let patch = patch_str
            .parse::<u32>()
            .map_err(|_| UpdateCheckError::VersionParseError(format!("Cannot parse patch version: {}", parts[2])))?;

        Ok((major, minor, patch))
    }
}

/// Get current version number
@@ -253,37 +186,6 @@ pub async fn check_updates_with_url(url: String) -> Result<UpdateCheckResult, Up
mod tests {
    use super::*;

    #[test]
    fn test_clean_version() {
        let checker = VersionChecker::new();

        assert_eq!(checker.clean_version("v1.0.0"), "1.0.0");
        assert_eq!(checker.clean_version("RELEASE.1.0.0"), "1.0.0");
        assert_eq!(checker.clean_version("@1.0.0"), "1.0.0");
        assert_eq!(checker.clean_version("1.0.0"), "1.0.0");
    }

    #[test]
    fn test_parse_version_parts() {
        let checker = VersionChecker::new();

        assert_eq!(checker.parse_version_parts("1.0.0").unwrap(), (1, 0, 0));
        assert_eq!(checker.parse_version_parts("2.1.3").unwrap(), (2, 1, 3));
        assert_eq!(checker.parse_version_parts("1.0.0-beta").unwrap(), (1, 0, 0));
    }

    #[test]
    fn test_version_comparison() {
        let checker = VersionChecker::new();

        // Test semantic version comparison
        assert!(checker.is_newer_version("1.0.0", "1.0.1").unwrap());
        assert!(checker.is_newer_version("1.0.0", "1.1.0").unwrap());
        assert!(checker.is_newer_version("1.0.0", "2.0.0").unwrap());
        assert!(!checker.is_newer_version("1.0.1", "1.0.0").unwrap());
        assert!(!checker.is_newer_version("1.0.0", "1.0.0").unwrap());
    }

    #[tokio::test]
    async fn test_get_current_version() {
        let version = get_current_version();
@@ -428,4 +330,21 @@ mod tests {

        println!("✅ VersionInfo tests passed");
    }

    #[test]
    fn test_version_functions_integration() {
        // Test that version functions from version.rs work correctly
        assert_eq!(version::clean_version("refs/tags/1.0.0-alpha.17"), "1.0.0-alpha.17");
        assert_eq!(version::clean_version("v1.0.0"), "1.0.0");

        // Test version comparison
        assert!(version::is_newer_version("1.0.0", "1.0.1").unwrap());
        assert!(!version::is_newer_version("1.0.1", "1.0.0").unwrap());

        // Test version parsing using parse_version
        assert_eq!(version::parse_version("1.0.0").unwrap(), (1, 0, 0, None));
        assert_eq!(version::parse_version("2.1.3-alpha.1").unwrap(), (2, 1, 3, Some("alpha.1".to_string())));

        println!("✅ Version functions integration tests passed");
    }
}

@@ -1,13 +1,362 @@
use shadow_rs::shadow;
use std::process::Command;

shadow!(build);

type VersionParseResult = Result<(u32, u32, u32, Option<String>), Box<dyn std::error::Error>>;

#[allow(clippy::const_is_empty)]
pub fn get_version() -> String {
    // Get the latest tag
    if let Ok(latest_tag) = get_latest_tag() {
        // Check whether the current commit is newer than the latest tag
        if is_head_newer_than_tag(&latest_tag) {
            // If the current commit is newer, bump the version number
            if let Ok(new_version) = increment_version(&latest_tag) {
                return format!("refs/tags/{new_version}");
            }
        }

        // If the current commit is the latest tag itself, or the bump failed, return the current tag
        return format!("refs/tags/{latest_tag}");
    }

    // If there is no tag, fall back to the original logic
    if !build::TAG.is_empty() {
        build::TAG.to_string()
        format!("refs/tags/{}", build::TAG)
    } else if !build::SHORT_COMMIT.is_empty() {
        format!("@{}", build::SHORT_COMMIT)
    } else {
        build::PKG_VERSION.to_string()
        format!("refs/tags/{}", build::PKG_VERSION)
    }
}

/// Get the latest git tag
fn get_latest_tag() -> Result<String, Box<dyn std::error::Error>> {
    let output = Command::new("git").args(["describe", "--tags", "--abbrev=0"]).output()?;

    if output.status.success() {
        let tag = String::from_utf8(output.stdout)?;
        Ok(tag.trim().to_string())
    } else {
        Err("Failed to get latest tag".into())
    }
}

/// Check whether the current HEAD is newer than the given tag
fn is_head_newer_than_tag(tag: &str) -> bool {
    let output = Command::new("git")
        .args(["merge-base", "--is-ancestor", tag, "HEAD"])
        .output();

    match output {
        Ok(result) => result.status.success(),
        Err(_) => false,
    }
}

/// Bump the version number (increment the patch version)
fn increment_version(version: &str) -> Result<String, Box<dyn std::error::Error>> {
    // Parse the version, e.g. "1.0.0-alpha.19" -> (1, 0, 0, Some("alpha.19"))
    let (major, minor, patch, pre_release) = parse_version(version)?;

    // If there is a pre-release identifier, increment the pre-release number
    if let Some(pre) = pre_release {
        if let Some(new_pre) = increment_pre_release(&pre) {
            return Ok(format!("{major}.{minor}.{patch}-{new_pre}"));
        }
    }

    // Otherwise increment the patch version
    Ok(format!("{major}.{minor}.{}", patch + 1))
}

/// Parse a version string
pub fn parse_version(version: &str) -> VersionParseResult {
    let parts: Vec<&str> = version.split('-').collect();
    let base_version = parts[0];
    let pre_release = if parts.len() > 1 { Some(parts[1..].join("-")) } else { None };

    let version_parts: Vec<&str> = base_version.split('.').collect();
    if version_parts.len() < 3 {
        return Err("Invalid version format".into());
    }

    let major: u32 = version_parts[0].parse()?;
    let minor: u32 = version_parts[1].parse()?;
    let patch: u32 = version_parts[2].parse()?;

    Ok((major, minor, patch, pre_release))
}

/// Increment the pre-release version number
fn increment_pre_release(pre_release: &str) -> Option<String> {
    // Handle pre-release versions of the form "alpha.19"
    let parts: Vec<&str> = pre_release.split('.').collect();
    if parts.len() == 2 {
        if let Ok(num) = parts[1].parse::<u32>() {
            return Some(format!("{}.{}", parts[0], num + 1));
        }
    }

    // Handle pre-release versions of the form "alpha19"
    if let Some(pos) = pre_release.rfind(|c: char| c.is_alphabetic()) {
        let prefix = &pre_release[..=pos];
        let suffix = &pre_release[pos + 1..];
        if let Ok(num) = suffix.parse::<u32>() {
            return Some(format!("{prefix}{}", num + 1));
        }
    }

    None
}

/// Clean version string - removes common prefixes
pub fn clean_version(version: &str) -> String {
    version
        .trim()
        .trim_start_matches("refs/tags/")
        .trim_start_matches('v')
        .trim_start_matches("RELEASE.")
        .trim_start_matches('@')
        .to_string()
}

/// Compare two versions to determine if the latest is newer
pub fn is_newer_version(current: &str, latest: &str) -> Result<bool, Box<dyn std::error::Error>> {
    // Clean version numbers, remove prefixes like "v", "RELEASE.", etc.
    let current_clean = clean_version(current);
    let latest_clean = clean_version(latest);

    // If versions are the same, no update is needed
    if current_clean == latest_clean {
        return Ok(false);
    }

    // Try semantic version comparison using parse_version
    match (parse_version(&current_clean), parse_version(&latest_clean)) {
        (Ok(current_parts), Ok(latest_parts)) => Ok(compare_version_parts(&current_parts, &latest_parts)),
        (Err(_), _) | (_, Err(_)) => {
            // If semantic version comparison fails, use string comparison
            Ok(latest_clean > current_clean)
        }
    }
}

/// Compare two version parts tuples (major, minor, patch, pre_release)
fn compare_version_parts(current: &(u32, u32, u32, Option<String>), latest: &(u32, u32, u32, Option<String>)) -> bool {
    let (cur_major, cur_minor, cur_patch, cur_pre) = current;
    let (lat_major, lat_minor, lat_patch, lat_pre) = latest;

    // Compare major version
    if lat_major != cur_major {
        return lat_major > cur_major;
    }

    // Compare minor version
    if lat_minor != cur_minor {
        return lat_minor > cur_minor;
    }

    // Compare patch version
    if lat_patch != cur_patch {
        return lat_patch > cur_patch;
    }

    // Compare pre-release versions
    match (cur_pre, lat_pre) {
        (None, None) => false, // Same version
        (Some(_), None) => true, // Pre-release < release
        (None, Some(_)) => false, // Release > pre-release
        (Some(cur_pre), Some(lat_pre)) => {
            // Both are pre-release, compare them
            compare_pre_release(cur_pre, lat_pre)
        }
    }
}

/// Compare pre-release versions
fn compare_pre_release(current: &str, latest: &str) -> bool {
    // Split by dots and compare each part
    let current_parts: Vec<&str> = current.split('.').collect();
    let latest_parts: Vec<&str> = latest.split('.').collect();

    for (cur_part, lat_part) in current_parts.iter().zip(latest_parts.iter()) {
        // Try to parse as numbers first
        match (cur_part.parse::<u32>(), lat_part.parse::<u32>()) {
            (Ok(cur_num), Ok(lat_num)) => {
                if cur_num != lat_num {
                    return lat_num > cur_num;
                }
            }
            _ => {
                // If not numbers, compare as strings
                if cur_part != lat_part {
                    return lat_part > cur_part;
                }
            }
        }
    }

    // If all compared parts are equal, longer version is newer
    latest_parts.len() > current_parts.len()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_version() {
        // Test parsing a standard version
        let (major, minor, patch, pre_release) = parse_version("1.0.0").unwrap();
        assert_eq!(major, 1);
        assert_eq!(minor, 0);
        assert_eq!(patch, 0);
        assert_eq!(pre_release, None);

        // Test parsing a pre-release version
        let (major, minor, patch, pre_release) = parse_version("1.0.0-alpha.19").unwrap();
        assert_eq!(major, 1);
        assert_eq!(minor, 0);
        assert_eq!(patch, 0);
        assert_eq!(pre_release, Some("alpha.19".to_string()));
    }

    #[test]
    fn test_increment_pre_release() {
        // Test alpha.19 -> alpha.20
        assert_eq!(increment_pre_release("alpha.19"), Some("alpha.20".to_string()));

        // Test beta.5 -> beta.6
        assert_eq!(increment_pre_release("beta.5"), Some("beta.6".to_string()));

        // Test the case that cannot be parsed
        assert_eq!(increment_pre_release("unknown"), None);
    }

    #[test]
    fn test_increment_version() {
        // Test incrementing a pre-release version
        assert_eq!(increment_version("1.0.0-alpha.19").unwrap(), "1.0.0-alpha.20");

        // Test incrementing a standard version
        assert_eq!(increment_version("1.0.0").unwrap(), "1.0.1");
    }

    #[test]
    fn test_version_format() {
        // Test whether the version format starts with refs/tags/
        let version = get_version();
        assert!(version.starts_with("refs/tags/") || version.starts_with("@"));

        // If it is in refs/tags/ format, it should contain a version number
        if let Some(version_part) = version.strip_prefix("refs/tags/") {
            assert!(!version_part.is_empty());
        }
    }

    #[test]
    fn test_current_version_output() {
        // Print the current version output
        let version = get_version();
        println!("Current version: {version}");

        // Verify the version format
        assert!(version.starts_with("refs/tags/") || version.starts_with("@"));

        // If it is in refs/tags/ format, verify the version number is not empty
        if let Some(version_part) = version.strip_prefix("refs/tags/") {
            assert!(!version_part.is_empty());
            println!("Version part: {version_part}");
        }
    }

    #[test]
    fn test_clean_version() {
        assert_eq!(clean_version("v1.0.0"), "1.0.0");
        assert_eq!(clean_version("RELEASE.1.0.0"), "1.0.0");
        assert_eq!(clean_version("@1.0.0"), "1.0.0");
        assert_eq!(clean_version("1.0.0"), "1.0.0");
        assert_eq!(clean_version("refs/tags/1.0.0-alpha.17"), "1.0.0-alpha.17");
        assert_eq!(clean_version("refs/tags/v1.0.0"), "1.0.0");
    }

    #[test]
    fn test_is_newer_version() {
        // Test semantic version comparison
        assert!(is_newer_version("1.0.0", "1.0.1").unwrap());
        assert!(is_newer_version("1.0.0", "1.1.0").unwrap());
        assert!(is_newer_version("1.0.0", "2.0.0").unwrap());
        assert!(!is_newer_version("1.0.1", "1.0.0").unwrap());
        assert!(!is_newer_version("1.0.0", "1.0.0").unwrap());

        // Test version comparison with pre-release identifiers
        assert!(is_newer_version("1.0.0-alpha.1", "1.0.0-alpha.2").unwrap());
        assert!(is_newer_version("1.0.0-alpha.17", "1.0.1").unwrap());
        assert!(is_newer_version("refs/tags/1.0.0-alpha.16", "refs/tags/1.0.0-alpha.17").unwrap());
        assert!(!is_newer_version("refs/tags/1.0.0-alpha.17", "refs/tags/1.0.0-alpha.16").unwrap());

        // Test pre-release vs release comparison
        assert!(is_newer_version("1.0.0-alpha.1", "1.0.0").unwrap());
        assert!(is_newer_version("1.0.0-beta.1", "1.0.0").unwrap());
        assert!(!is_newer_version("1.0.0", "1.0.0-alpha.1").unwrap());
        assert!(!is_newer_version("1.0.0", "1.0.0-beta.1").unwrap());

        // Test pre-release version ordering
        assert!(is_newer_version("1.0.0-alpha.1", "1.0.0-alpha.2").unwrap());
        assert!(is_newer_version("1.0.0-alpha.19", "1.0.0-alpha.20").unwrap());
        assert!(is_newer_version("1.0.0-alpha.1", "1.0.0-beta.1").unwrap());
        assert!(is_newer_version("1.0.0-beta.1", "1.0.0-rc.1").unwrap());

        // Test complex pre-release versions
        assert!(is_newer_version("1.0.0-alpha.1.2", "1.0.0-alpha.1.3").unwrap());
        assert!(is_newer_version("1.0.0-alpha.1", "1.0.0-alpha.1.1").unwrap());
        assert!(!is_newer_version("1.0.0-alpha.1.3", "1.0.0-alpha.1.2").unwrap());
    }

    #[test]
    fn test_compare_version_parts() {
        // Test basic version comparison
        assert!(compare_version_parts(&(1, 0, 0, None), &(1, 0, 1, None)));
        assert!(compare_version_parts(&(1, 0, 0, None), &(1, 1, 0, None)));
        assert!(compare_version_parts(&(1, 0, 0, None), &(2, 0, 0, None)));
        assert!(!compare_version_parts(&(1, 0, 1, None), &(1, 0, 0, None)));

        // Test pre-release vs release
        assert!(compare_version_parts(&(1, 0, 0, Some("alpha.1".to_string())), &(1, 0, 0, None)));
        assert!(!compare_version_parts(&(1, 0, 0, None), &(1, 0, 0, Some("alpha.1".to_string()))));

        // Test pre-release comparison
        assert!(compare_version_parts(
            &(1, 0, 0, Some("alpha.1".to_string())),
            &(1, 0, 0, Some("alpha.2".to_string()))
        ));
        assert!(compare_version_parts(
            &(1, 0, 0, Some("alpha.19".to_string())),
            &(1, 0, 0, Some("alpha.20".to_string()))
        ));
        assert!(compare_version_parts(
            &(1, 0, 0, Some("alpha.1".to_string())),
            &(1, 0, 0, Some("beta.1".to_string()))
        ));
    }

    #[test]
    fn test_compare_pre_release() {
        // Test numeric pre-release comparison
        assert!(compare_pre_release("alpha.1", "alpha.2"));
        assert!(compare_pre_release("alpha.19", "alpha.20"));
        assert!(!compare_pre_release("alpha.2", "alpha.1"));

        // Test string pre-release comparison
        assert!(compare_pre_release("alpha.1", "beta.1"));
        assert!(compare_pre_release("beta.1", "rc.1"));
        assert!(!compare_pre_release("beta.1", "alpha.1"));

        // Test complex pre-release comparison
        assert!(compare_pre_release("alpha.1.2", "alpha.1.3"));
        assert!(compare_pre_release("alpha.1", "alpha.1.1"));
        assert!(!compare_pre_release("alpha.1.3", "alpha.1.2"));
    }
}

@@ -51,7 +51,7 @@ export RUSTFS_CONSOLE_ADDRESS=":9001"
# export RUSTFS_TLS_PATH="./deploy/certs"

# Observability-related configuration
export RUSTFS_OBS_ENDPOINT=http://localhost:4317 # Address of the OpenTelemetry Collector
#export RUSTFS_OBS_ENDPOINT=http://localhost:4317 # Address of the OpenTelemetry Collector
#export RUSTFS_OBS_USE_STDOUT=false # Whether to log to stdout
#export RUSTFS_OBS_SAMPLE_RATIO=2.0 # Sampling ratio between 0.0 and 1.0; 0.0 means no sampling, 1.0 means sample everything
#export RUSTFS_OBS_METER_INTERVAL=1 # Sampling interval in seconds
