bug: potential fix for cross-platform test workflows (#9255)

This commit is contained in:
Eric Pinzur 2025-07-31 12:55:50 +02:00 committed by GitHub
commit 25f1107e18
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 47 additions and 141 deletions

View file

@@ -27,6 +27,9 @@ jobs:
install-method: "pypi"
test-timeout: ${{ inputs.test-timeout }}
langflow-version: ${{ inputs.langflow-version }}
base-artifact-name: ""
main-artifact-name: ""
run-id: ""
test-source-build:
if: inputs.test-from-pypi == false
@@ -36,6 +39,7 @@ jobs:
base-artifact-name: ""
main-artifact-name: ""
test-timeout: ${{ inputs.test-timeout }}
run-id: ""
test-summary:
name: Test Summary

View file

@@ -79,35 +79,6 @@ jobs:
python-version: "3.12"
steps:
- name: Debug workflow inputs
run: |
echo "Shared workflow received inputs:"
echo " install-method: ${{ inputs.install-method }}"
echo " test-timeout: ${{ inputs.test-timeout }}"
echo " base-artifact-name: '${{ inputs.base-artifact-name }}'"
echo " main-artifact-name: '${{ inputs.main-artifact-name }}'"
echo " langflow-version: '${{ inputs.langflow-version }}'"
echo " run-id: '${{ inputs.run-id }}'"
echo ""
echo "Matrix values:"
echo " os: ${{ matrix.os }}"
echo " arch: ${{ matrix.arch }}"
echo " python-version: ${{ matrix.python-version }}"
echo " runner: ${{ matrix.runner }}"
shell: bash
- name: Validate required parameters for wheel installation
if: inputs.install-method == 'wheel'
run: |
if [ -z "${{ inputs.base-artifact-name }}" ] || [ -z "${{ inputs.main-artifact-name }}" ]; then
echo "❌ Error: base-artifact-name and main-artifact-name are required when install-method is 'wheel'"
echo " base-artifact-name: '${{ inputs.base-artifact-name }}'"
echo " main-artifact-name: '${{ inputs.main-artifact-name }}'"
exit 1
fi
echo "✅ Required parameters for wheel installation are present"
shell: bash
- name: Setup Python
uses: actions/setup-python@v5
with:
@@ -332,17 +303,3 @@ jobs:
"
shell: bash
test-summary:
name: Cross-Platform Test Summary
needs: test-installation
runs-on: ubuntu-latest
if: always()
steps:
- name: Check test results
run: |
if [ "${{ needs.test-installation.result }}" != "success" ]; then
echo "❌ Cross-platform tests failed"
exit 1
else
echo "✅ All cross-platform tests passed"
fi

View file

@@ -1,6 +1,6 @@
# Ad-Hoc Cross-Platform Install Tests
# Cross-Platform Install Tests
Quick guide for running cross-platform installation tests manually.
Guide for running cross-platform installation tests manually and programmatically.
## Available Tests
@@ -16,10 +16,10 @@ Tests published langflow packages from PyPI across all platforms.
**Via CLI:**
```bash
# Test latest version
gh workflow run manual-cross-platform-test.yml -f test-from-pypi=true
gh workflow run cross-platform-test-manual.yml -f test-from-pypi=true
# Test specific version
gh workflow run manual-cross-platform-test.yml \
gh workflow run cross-platform-test-manual.yml \
-f test-from-pypi=true \
-f langflow-version="1.0.18"
```
@@ -35,7 +35,7 @@ Builds and tests langflow from current branch source code.
**Via CLI:**
```bash
# Test current branch
gh workflow run manual-cross-platform-test.yml -f test-from-pypi=false
gh workflow run cross-platform-test-manual.yml -f test-from-pypi=false
```
## Platforms Tested
@@ -59,11 +59,11 @@ gh workflow run manual-cross-platform-test.yml -f test-from-pypi=false
```bash
# Extended timeout (10 minutes instead of default 5)
gh workflow run manual-cross-platform-test.yml \
gh workflow run cross-platform-test-manual.yml \
-f test-timeout=10
# Test specific PyPI version
gh workflow run manual-cross-platform-test.yml \
gh workflow run cross-platform-test-manual.yml \
-f test-from-pypi=true \
-f langflow-version="1.0.18"
```
@@ -93,9 +93,38 @@ gh workflow run manual-cross-platform-test.yml \
- **Virtual Environments**: Uses `uv venv --seed` for consistent pip availability
### Workflow Architecture
- **Shared Logic**: Common test steps defined in `shared-cross-platform-test.yml`
- **DRY principle**: No code duplication between manual and automated workflows
- **Flexible**: Supports both wheel and PyPI installation methods through single workflow
```
Manual Entry Point:
└── cross-platform-test-manual.yml (workflow_dispatch)
├── PyPI Mode → cross-platform-test-shared.yml
└── Source Mode → cross-platform-test.yml → cross-platform-test-shared.yml
Programmatic Entry Point:
└── cross-platform-test.yml (workflow_call only)
└── cross-platform-test-shared.yml
```
- **Manual Workflow**: User-facing interface with PyPI/source options
- **Main Workflow**: Internal workflow for programmatic calls (CI, releases)
- **Shared Workflow**: Core test execution logic (matrix jobs)
- **Single Entry Point**: Use `cross-platform-test-manual.yml` for all manual testing
### Parameter Requirements
⚠️ **Important**: When calling reusable workflows from `workflow_dispatch` triggers, **all parameters must be explicitly provided**, even optional ones with defaults. Missing optional parameters can cause workflows to be silently skipped.
**Example of correct parameter passing:**
```yaml
uses: ./.github/workflows/cross-platform-test-shared.yml
with:
install-method: "wheel"
test-timeout: 5
langflow-version: "" # ← Required even though optional
base-artifact-name: "dist"
main-artifact-name: "dist"
run-id: "" # ← Required even though optional
```
## Results

View file

@@ -1,28 +1,6 @@
name: Cross-Platform Installation Test
on:
workflow_dispatch:
inputs:
base-artifact-name:
description: "Name of the base package artifact (leave empty to use latest from main branch)"
required: false
type: string
default: ""
main-artifact-name:
description: "Name of the main package artifact (leave empty to use latest from main branch)"
required: false
type: string
default: ""
test-timeout:
description: "Timeout for langflow server startup test (minutes)"
required: false
type: number
default: 5
run-id:
description: "GitHub run ID to download artifacts from (leave empty for latest successful run)"
required: false
type: string
default: ""
workflow_call:
inputs:
base-artifact-name:
@@ -81,80 +59,18 @@ jobs:
run: |
echo "base-artifact-name=adhoc-dist-base" >> $GITHUB_OUTPUT
echo "main-artifact-name=adhoc-dist-main" >> $GITHUB_OUTPUT
- name: Debug artifact names
run: |
echo "Build job outputs:"
echo " base-artifact-name: adhoc-dist-base"
echo " main-artifact-name: adhoc-dist-main"
echo "Input parameters:"
echo " inputs.base-artifact-name: '${{ inputs.base-artifact-name }}'"
echo " inputs.main-artifact-name: '${{ inputs.main-artifact-name }}'"
resolve-parameters:
name: Resolve Parameters
needs: [build-if-needed]
if: always() && (needs.build-if-needed.result == 'success' || needs.build-if-needed.result == 'skipped')
runs-on: ubuntu-latest
outputs:
base-artifact-name: ${{ steps.resolve.outputs.base-artifact-name }}
main-artifact-name: ${{ steps.resolve.outputs.main-artifact-name }}
test-timeout: ${{ steps.resolve.outputs.test-timeout }}
steps:
- name: Resolve artifact names
id: resolve
run: |
# Resolve base artifact name
if [ -n "${{ inputs.base-artifact-name }}" ] && [ "${{ inputs.base-artifact-name }}" != "" ]; then
BASE_NAME="${{ inputs.base-artifact-name }}"
elif [ -n "${{ needs.build-if-needed.outputs.base-artifact-name }}" ]; then
BASE_NAME="${{ needs.build-if-needed.outputs.base-artifact-name }}"
else
BASE_NAME="adhoc-dist-base"
fi
# Resolve main artifact name
if [ -n "${{ inputs.main-artifact-name }}" ] && [ "${{ inputs.main-artifact-name }}" != "" ]; then
MAIN_NAME="${{ inputs.main-artifact-name }}"
elif [ -n "${{ needs.build-if-needed.outputs.main-artifact-name }}" ]; then
MAIN_NAME="${{ needs.build-if-needed.outputs.main-artifact-name }}"
else
MAIN_NAME="adhoc-dist-main"
fi
# Resolve test timeout
if [ -n "${{ inputs.test-timeout }}" ]; then
TIMEOUT="${{ inputs.test-timeout }}"
else
TIMEOUT="5"
fi
echo "base-artifact-name=${BASE_NAME}" >> $GITHUB_OUTPUT
echo "main-artifact-name=${MAIN_NAME}" >> $GITHUB_OUTPUT
echo "test-timeout=${TIMEOUT}" >> $GITHUB_OUTPUT
echo "Resolved parameters:"
echo " base-artifact-name: ${BASE_NAME}"
echo " main-artifact-name: ${MAIN_NAME}"
echo " test-timeout: ${TIMEOUT}"
echo ""
echo "Input values:"
echo " inputs.base-artifact-name: '${{ inputs.base-artifact-name }}'"
echo " inputs.main-artifact-name: '${{ inputs.main-artifact-name }}'"
echo " inputs.test-timeout: '${{ inputs.test-timeout }}'"
echo " build-if-needed.outputs.base-artifact-name: '${{ needs.build-if-needed.outputs.base-artifact-name }}'"
echo " build-if-needed.outputs.main-artifact-name: '${{ needs.build-if-needed.outputs.main-artifact-name }}'"
test-wheel-installation:
name: Test Wheel Installation
needs: [build-if-needed, resolve-parameters]
needs: [build-if-needed]
if: always() && (needs.build-if-needed.result == 'success' || needs.build-if-needed.result == 'skipped')
uses: ./.github/workflows/cross-platform-test-shared.yml
with:
install-method: "wheel"
test-timeout: 5
test-timeout: ${{ inputs.test-timeout }}
langflow-version: ""
base-artifact-name: "adhoc-dist-base"
main-artifact-name: "adhoc-dist-main"
base-artifact-name: ${{ inputs.base-artifact-name || needs.build-if-needed.outputs.base-artifact-name }}
main-artifact-name: ${{ inputs.main-artifact-name || needs.build-if-needed.outputs.main-artifact-name }}
run-id: ""
test-summary: