mirror of
https://github.com/astral-sh/setup-uv.git
synced 2026-03-13 01:26:47 +00:00
Compare commits
141 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e06108dd0a | ||
|
|
0f6ec07aaf | ||
|
|
821e5c9815 | ||
|
|
6ee6290f1c | ||
|
|
9f332a133a | ||
|
|
0acf9708ce | ||
|
|
fe3617d6e9 | ||
|
|
2ff70eebcc | ||
|
|
5ba8a7e5d0 | ||
|
|
4bc8fabc0c | ||
|
|
950b623541 | ||
|
|
09ff6fe0ae | ||
|
|
bd870193dd | ||
|
|
f8858e6756 | ||
|
|
5a095e7a20 | ||
|
|
b12532f27f | ||
|
|
0098a7571c | ||
|
|
2e7ed0e2bb | ||
|
|
04224aa8ca | ||
|
|
2bc602ff89 | ||
|
|
dd9d748439 | ||
|
|
14eede1834 | ||
|
|
c452423b2c | ||
|
|
eac588ad8d | ||
|
|
a97c6cbe9c | ||
|
|
02182fa02a | ||
|
|
a3b3eaea92 | ||
|
|
78cebeceac | ||
|
|
b6b8e2cd6a | ||
|
|
e31bec8546 | ||
|
|
db2b65ebae | ||
|
|
3511ff7054 | ||
|
|
99b0f0474b | ||
|
|
db4d6bf3d6 | ||
|
|
98e1309028 | ||
|
|
5ed2ede620 | ||
|
|
5fca386933 | ||
|
|
803947b9bd | ||
|
|
24553ac46d | ||
|
|
085087a5d3 | ||
|
|
9cfd029643 | ||
|
|
dd9d55bc18 | ||
|
|
8512ad0289 | ||
|
|
cc5581700e | ||
|
|
61cb8a9741 | ||
|
|
11050edb83 | ||
|
|
1d22fafd8b | ||
|
|
f4ed82a8ce | ||
|
|
e0409b43c0 | ||
|
|
702b425af1 | ||
|
|
2630c86ac3 | ||
|
|
45cfcb3be5 | ||
|
|
ce0a8994de | ||
|
|
9c8d030b7f | ||
|
|
681c641aba | ||
|
|
2e85713bb0 | ||
|
|
58b6d7b303 | ||
|
|
e8b52af86e | ||
|
|
ed21f2f24f | ||
|
|
93202d8fbe | ||
|
|
5ce090076d | ||
|
|
4180991cd9 | ||
|
|
0439606c8e | ||
|
|
7dd56c18e9 | ||
|
|
9c12baee96 | ||
|
|
64f7f4e15f | ||
|
|
5ae467fbf9 | ||
|
|
06e4edb239 | ||
|
|
8f1d388b4b | ||
|
|
d500d41ebf | ||
|
|
1e64fb113b | ||
|
|
be7fc19b41 | ||
|
|
1e862dfacb | ||
|
|
d7d33e16d4 | ||
|
|
486d0b8872 | ||
|
|
5a7eac68fb | ||
|
|
b49dc9e882 | ||
|
|
30ce38e206 | ||
|
|
0d20755a23 | ||
|
|
8491d1d9a3 | ||
|
|
85856786d1 | ||
|
|
22d500a65c | ||
|
|
14d557131d | ||
|
|
29cd2350cd | ||
|
|
2ddd2b9cb3 | ||
|
|
b7bf78939d | ||
|
|
cb6c0a53d9 | ||
|
|
dffc6292f2 | ||
|
|
6e346e1653 | ||
|
|
3ccd0fd498 | ||
|
|
ce6dbd84e1 | ||
|
|
2382069a66 | ||
|
|
b1daf91f4e | ||
|
|
3259c6206f | ||
|
|
bf8e8ed895 | ||
|
|
9c6b5e9fb5 | ||
|
|
a5129e99f4 | ||
|
|
d18bcc753a | ||
|
|
bd1f875aba | ||
|
|
1a91c3851d | ||
|
|
c79f606987 | ||
|
|
e0249f1599 | ||
|
|
6d2eb15b49 | ||
|
|
3495667518 | ||
|
|
eb1897b8dc | ||
|
|
d78d791822 | ||
|
|
535dc2664c | ||
|
|
f610be5ff9 | ||
|
|
3deccc0075 | ||
|
|
d9ee7e2f26 | ||
|
|
59a0868fea | ||
|
|
c952556164 | ||
|
|
51c3328db2 | ||
|
|
f2859da213 | ||
|
|
f9c6974d8b | ||
|
|
82f21a54fe | ||
|
|
d8a37f6566 | ||
|
|
d0cc045d04 | ||
|
|
2841f9f5c1 | ||
|
|
e554b93b80 | ||
|
|
c7d85d9988 | ||
|
|
07f2cb5db9 | ||
|
|
208b0c0ee4 | ||
|
|
b75a909f75 | ||
|
|
ffff8aa2b5 | ||
|
|
95d0e233fa | ||
|
|
dc724a12b6 | ||
|
|
f67343ac2e | ||
|
|
4dd9f52a47 | ||
|
|
e1e6fe7910 | ||
|
|
b1836110f7 | ||
|
|
557e51de59 | ||
|
|
1b46e13ec8 | ||
|
|
26cf676705 | ||
|
|
4e1e303f7d | ||
|
|
4959332f0f | ||
|
|
adeb28643f | ||
|
|
fce199e243 | ||
|
|
f758a4a1eb | ||
|
|
c0e7e93474 | ||
|
|
fda2399cb3 |
48
.agents/skills/dependabot-pr-rollup/SKILL.md
Normal file
48
.agents/skills/dependabot-pr-rollup/SKILL.md
Normal file
@@ -0,0 +1,48 @@
|
||||
---
|
||||
name: dependabot-pr-rollup
|
||||
description: Find open Dependabot PRs for the current GitHub repo, compare each PR head to its base branch, replay only the net dependency changes in a fresh worktree and branch, run npm validation, and optionally commit, push, and open a PR. Use when you want to batch or manually replicate active Dependabot updates.
|
||||
license: MIT
|
||||
compatibility: Requires git, git worktree, gh CLI auth, npm, and a GitHub repo with an origin remote.
|
||||
---
|
||||
|
||||
# Dependabot PR Rollup
|
||||
|
||||
## When to use
|
||||
|
||||
Use this skill when the user wants to:
|
||||
- find all open Dependabot PRs in the current repo
|
||||
- reproduce their net effect in one local branch
|
||||
- validate the result with the repo's standard npm checks
|
||||
- optionally commit, push, and open a PR
|
||||
|
||||
## Workflow
|
||||
|
||||
1. Inspect the current checkout state, but do not reuse a dirty worktree.
|
||||
2. List open Dependabot PRs with `gh pr list --state open --author app/dependabot`.
|
||||
3. For each PR, collect the title, base branch, head branch, changed files, and relevant diffs.
|
||||
4. Compare each PR head against `origin/<base>` instead of trusting the PR title. Dependabot PRs can already be partially merged, superseded by newer versions, or have no remaining net effect.
|
||||
5. Create a new worktree and branch from `origin/<base>`.
|
||||
6. Reproduce only the remaining dependency changes in the new worktree.
|
||||
- Inspect `package.json` before editing.
|
||||
- Run `npm ci --ignore-scripts` before applying updates.
|
||||
- Use `npm install ... --ignore-scripts` for direct dependency changes so `package-lock.json` stays in sync.
|
||||
7. Run `npm run all`.
|
||||
8. If requested, commit the changed source, lockfile, and generated artifacts, then push and open a PR.
|
||||
|
||||
## Repo-specific notes
|
||||
|
||||
- Use `gh` for GitHub operations.
|
||||
- Keep the user's original checkout untouched by working in a separate worktree.
|
||||
- In this repo, `npm run all` is the safest validation command because it runs build, check, package, and test.
|
||||
- If dependency changes affect bundled output, include the regenerated `dist/` files.
|
||||
|
||||
## Report back
|
||||
|
||||
Always report:
|
||||
- open Dependabot PRs found
|
||||
- which PRs required no net changes
|
||||
- new branch name
|
||||
- new worktree path
|
||||
- files changed
|
||||
- `npm run all` result
|
||||
- if applicable, commit SHA and PR URL
|
||||
4
.github/actionlint.yaml
vendored
4
.github/actionlint.yaml
vendored
@@ -7,3 +7,7 @@ self-hosted-runner:
|
||||
# organization. `null` means disabling configuration variables check.
|
||||
# Empty array means no configuration variable is allowed.
|
||||
config-variables: null
|
||||
paths:
|
||||
.github/workflows/test.yml:
|
||||
ignore:
|
||||
- 'invalid runner name.+'
|
||||
|
||||
4
.github/dependabot.yml
vendored
4
.github/dependabot.yml
vendored
@@ -4,8 +4,12 @@ updates:
|
||||
directory: /
|
||||
schedule:
|
||||
interval: daily
|
||||
cooldown:
|
||||
default-days: 7
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /
|
||||
schedule:
|
||||
interval: daily
|
||||
cooldown:
|
||||
default-days: 7
|
||||
|
||||
2
.github/release-drafter.yml
vendored
2
.github/release-drafter.yml
vendored
@@ -19,7 +19,7 @@ categories:
|
||||
labels:
|
||||
- "maintenance"
|
||||
- "ci"
|
||||
- "update-known-versions"
|
||||
- "update-known-checksums"
|
||||
- title: "📚 Documentation"
|
||||
labels:
|
||||
- "documentation"
|
||||
|
||||
12
.github/workflows/codeql-analysis.yml
vendored
12
.github/workflows/codeql-analysis.yml
vendored
@@ -21,6 +21,8 @@ on:
|
||||
branches:
|
||||
- main
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
@@ -39,11 +41,13 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
uses: github/codeql-action/init@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
source-root: src
|
||||
@@ -55,7 +59,7 @@ jobs:
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v3
|
||||
uses: github/codeql-action/autobuild@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
@@ -69,4 +73,4 @@ jobs:
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
uses: github/codeql-action/analyze@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
|
||||
|
||||
4
.github/workflows/release-drafter.yml
vendored
4
.github/workflows/release-drafter.yml
vendored
@@ -8,6 +8,8 @@ on:
|
||||
branches:
|
||||
- main
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
update_release_draft:
|
||||
name: ✏️ Draft release
|
||||
@@ -17,6 +19,6 @@ jobs:
|
||||
pull-requests: read
|
||||
steps:
|
||||
- name: 🚀 Run Release Drafter
|
||||
uses: release-drafter/release-drafter@v6.1.0
|
||||
uses: release-drafter/release-drafter@6db134d15f3909ccc9eefd369f02bd1e9cffdf97 # v6.2.0
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
864
.github/workflows/test.yml
vendored
864
.github/workflows/test.yml
vendored
File diff suppressed because it is too large
Load Diff
68
.github/workflows/update-known-checksums.yml
vendored
Normal file
68
.github/workflows/update-known-checksums.yml
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
name: "Update known checksums"
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "0 4 * * *" # Run every day at 4am UTC
|
||||
repository_dispatch:
|
||||
types: [ pypi_release ]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: true
|
||||
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Update known checksums
|
||||
id: update-known-checksums
|
||||
run:
|
||||
node dist/update-known-checksums/index.js
|
||||
src/download/checksum/known-checksums.ts
|
||||
- name: Check for changes
|
||||
id: changes-exist
|
||||
run: |
|
||||
git status --porcelain
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
echo "changes-exist=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "changes-exist=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
- name: Compile changes
|
||||
if: ${{ steps.changes-exist.outputs.changes-exist == 'true' }}
|
||||
run: npm ci --ignore-scripts && npm run all
|
||||
- name: Commit and push changes
|
||||
if: ${{ steps.changes-exist.outputs.changes-exist == 'true' }}
|
||||
id: commit-and-push
|
||||
continue-on-error: true
|
||||
run: |
|
||||
git config user.name "$GITHUB_ACTOR"
|
||||
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
|
||||
git add .
|
||||
git commit -m "chore: update known checksums for $LATEST_VERSION"
|
||||
git push origin HEAD:refs/heads/main
|
||||
env:
|
||||
LATEST_VERSION: ${{ steps.update-known-checksums.outputs.latest-version }}
|
||||
|
||||
- name: Create Pull Request
|
||||
if: ${{ steps.changes-exist.outputs.changes-exist == 'true' && steps.commit-and-push.outcome != 'success' }}
|
||||
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
|
||||
with:
|
||||
commit-message: "chore: update known checksums"
|
||||
title:
|
||||
"chore: update known checksums for ${{
|
||||
steps.update-known-checksums.outputs.latest-version }}"
|
||||
body:
|
||||
"chore: update known checksums for ${{
|
||||
steps.update-known-checksums.outputs.latest-version }}"
|
||||
base: main
|
||||
labels: "automated-pr,update-known-checksums"
|
||||
branch: update-known-checksums-pr
|
||||
delete-branch: true
|
||||
39
.github/workflows/update-known-versions.yml
vendored
39
.github/workflows/update-known-versions.yml
vendored
@@ -1,39 +0,0 @@
|
||||
name: "Update known versions"
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "0 4 * * *" # Run every day at 4am UTC
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Update known versions
|
||||
id: update-known-versions
|
||||
run:
|
||||
node dist/update-known-versions/index.js
|
||||
src/download/checksum/known-checksums.ts
|
||||
version-manifest.json
|
||||
${{ secrets.GITHUB_TOKEN }}
|
||||
- run: npm install && npm run all
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
commit-message: "chore: update known versions"
|
||||
title:
|
||||
"chore: update known versions for ${{
|
||||
steps.update-known-versions.outputs.latest-version }}"
|
||||
body:
|
||||
"chore: update known versions for ${{
|
||||
steps.update-known-versions.outputs.latest-version }}"
|
||||
base: main
|
||||
labels: "automated-pr,update-known-versions"
|
||||
branch: update-known-versions-pr
|
||||
delete-branch: true
|
||||
@@ -8,6 +8,8 @@ on:
|
||||
tags:
|
||||
- "v*.*.*"
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
update_major_minor_tags:
|
||||
name: Make sure major and minor tags are up to date on a patch release
|
||||
@@ -15,7 +17,9 @@ jobs:
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: true # needed for git push below
|
||||
- name: Update Major Minor Tags
|
||||
run: |
|
||||
set -x
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -100,3 +100,6 @@ lib/**/*
|
||||
|
||||
# Idea IDEs (PyCharm, WebStorm, IntelliJ, etc)
|
||||
.idea/
|
||||
|
||||
# Compiled scripts
|
||||
.github/scripts/*.js
|
||||
|
||||
13
AGENTS.md
Normal file
13
AGENTS.md
Normal file
@@ -0,0 +1,13 @@
|
||||
# setup-uv agent notes
|
||||
|
||||
This repository is a TypeScript-based GitHub Action for installing `uv` in GitHub Actions workflows. It also supports restoring/saving the `uv` cache and optional managed-Python caching.
|
||||
|
||||
- The published action runs the committed bundles in `dist/`, not the TypeScript in `src/`. After any code change, run `npm run package` and commit the resulting `dist/` updates.
|
||||
- Standard local validation is:
|
||||
1. `npm ci --ignore-scripts`
|
||||
2. `npm run all`
|
||||
- `npm run check` uses Biome (not ESLint/Prettier) and rewrites files in place.
|
||||
- User-facing changes are usually multi-file changes. If you add or change inputs, outputs, or behavior, update `action.yml`, the implementation in `src/`, tests in `__tests__/`, relevant docs/README, and then re-package.
|
||||
- The easiest areas to regress are version resolution and caching. When touching them, add or update tests for precedence, cache invalidation, and cross-platform path behavior.
|
||||
- Workflow edits have extra CI-only checks (`actionlint` and `zizmor`); `npm run all` does not cover them.
|
||||
- Before finishing, make sure validation does not leave generated or formatting-only diffs behind.
|
||||
536
README.md
536
README.md
@@ -12,25 +12,11 @@ Set up your GitHub Actions workflow with a specific version of [uv](https://docs
|
||||
|
||||
- [Usage](#usage)
|
||||
- [Install a required-version or latest (default)](#install-a-required-version-or-latest-default)
|
||||
- [Install the latest version](#install-the-latest-version)
|
||||
- [Install a specific version](#install-a-specific-version)
|
||||
- [Install a version by supplying a semver range or pep440 specifier](#install-a-version-by-supplying-a-semver-range-or-pep440-specifier)
|
||||
- [Install a version defined in a requirements or config file](#install-a-version-defined-in-a-requirements-or-config-file)
|
||||
- [Inputs](#inputs)
|
||||
- [Outputs](#outputs)
|
||||
- [Python version](#python-version)
|
||||
- [Activate environment](#activate-environment)
|
||||
- [Working directory](#working-directory)
|
||||
- [Validate checksum](#validate-checksum)
|
||||
- [Enable Caching](#enable-caching)
|
||||
- [Cache dependency glob](#cache-dependency-glob)
|
||||
- [Local cache path](#local-cache-path)
|
||||
- [Disable cache pruning](#disable-cache-pruning)
|
||||
- [Ignore nothing to cache](#ignore-nothing-to-cache)
|
||||
- [GitHub authentication token](#github-authentication-token)
|
||||
- [UV_TOOL_DIR](#uv_tool_dir)
|
||||
- [UV_TOOL_BIN_DIR](#uv_tool_bin_dir)
|
||||
- [Tilde Expansion](#tilde-expansion)
|
||||
- [Manifest file](#manifest-file)
|
||||
- [Add problem matchers](#add-problem-matchers)
|
||||
- [Advanced Configuration](#advanced-configuration)
|
||||
- [How it works](#how-it-works)
|
||||
- [FAQ](#faq)
|
||||
|
||||
@@ -40,7 +26,7 @@ Set up your GitHub Actions workflow with a specific version of [uv](https://docs
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
uses: astral-sh/setup-uv@v7
|
||||
```
|
||||
|
||||
If you do not specify a version, this action will look for a [required-version](https://docs.astral.sh/uv/reference/settings/#required-version)
|
||||
@@ -49,63 +35,101 @@ in a `uv.toml` or `pyproject.toml` file in the repository root. If none is found
|
||||
For an example workflow, see
|
||||
[here](https://github.com/charliermarsh/autobot/blob/e42c66659bf97b90ca9ff305a19cc99952d0d43f/.github/workflows/ci.yaml).
|
||||
|
||||
### Install the latest version
|
||||
### Inputs
|
||||
|
||||
All inputs and their defaults.
|
||||
Have a look under [Advanced Configuration](#advanced-configuration) for detailed documentation on most of them.
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
- name: Install uv with all available options
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
version: "latest"
|
||||
# The version of uv to install (default: searches for version in config files, then latest)
|
||||
version: ""
|
||||
|
||||
# Path to a file containing the version of uv to install (default: searches uv.toml then pyproject.toml)
|
||||
version-file: ""
|
||||
|
||||
# Resolution strategy when resolving version ranges: 'highest' or 'lowest'
|
||||
resolution-strategy: "highest"
|
||||
|
||||
# The version of Python to set UV_PYTHON to
|
||||
python-version: ""
|
||||
|
||||
# Use uv venv to activate a venv ready to be used by later steps
|
||||
activate-environment: "false"
|
||||
|
||||
# Custom path for the virtual environment when using activate-environment (default: .venv in the working directory)
|
||||
venv-path: ""
|
||||
|
||||
# The directory to execute all commands in and look for files such as pyproject.toml
|
||||
working-directory: ""
|
||||
|
||||
# The checksum of the uv version to install
|
||||
checksum: ""
|
||||
|
||||
# Used when downloading uv from GitHub releases
|
||||
github-token: ${{ github.token }}
|
||||
|
||||
# Enable uploading of the uv cache: true, false, or auto (enabled on GitHub-hosted runners, disabled on self-hosted runners)
|
||||
enable-cache: "auto"
|
||||
|
||||
# Glob pattern to match files relative to the repository root to control the cache
|
||||
cache-dependency-glob: |
|
||||
**/*requirements*.txt
|
||||
**/*requirements*.in
|
||||
**/*constraints*.txt
|
||||
**/*constraints*.in
|
||||
**/pyproject.toml
|
||||
**/uv.lock
|
||||
**/*.py.lock
|
||||
|
||||
# Whether to restore the cache if found
|
||||
restore-cache: "true"
|
||||
|
||||
# Whether to save the cache after the run
|
||||
save-cache: "true"
|
||||
|
||||
# Suffix for the cache key
|
||||
cache-suffix: ""
|
||||
|
||||
# Local path to store the cache (default: "" - uses system temp directory)
|
||||
cache-local-path: ""
|
||||
|
||||
# Prune cache before saving
|
||||
prune-cache: "true"
|
||||
|
||||
# Upload managed Python installations to the GitHub Actions cache
|
||||
cache-python: "false"
|
||||
|
||||
# Ignore when nothing is found to cache
|
||||
ignore-nothing-to-cache: "false"
|
||||
|
||||
# Ignore when the working directory is empty
|
||||
ignore-empty-workdir: "false"
|
||||
|
||||
# Custom path to set UV_TOOL_DIR to
|
||||
tool-dir: ""
|
||||
|
||||
# Custom path to set UV_TOOL_BIN_DIR to
|
||||
tool-bin-dir: ""
|
||||
|
||||
# URL to a custom manifest file (NDJSON preferred, legacy JSON array is deprecated)
|
||||
manifest-file: ""
|
||||
|
||||
# Add problem matchers
|
||||
add-problem-matchers: "true"
|
||||
```
|
||||
|
||||
### Install a specific version
|
||||
### Outputs
|
||||
|
||||
```yaml
|
||||
- name: Install a specific version of uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
version: "0.4.4"
|
||||
```
|
||||
|
||||
### Install a version by supplying a semver range or pep440 specifier
|
||||
|
||||
You can specify a [semver range](https://github.com/npm/node-semver?tab=readme-ov-file#ranges)
|
||||
or [pep440 specifier](https://peps.python.org/pep-0440/#version-specifiers)
|
||||
to install the latest version that satisfies the range.
|
||||
|
||||
```yaml
|
||||
- name: Install a semver range of uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
version: ">=0.4.0"
|
||||
```
|
||||
|
||||
```yaml
|
||||
- name: Pinning a minor version of uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
version: "0.4.x"
|
||||
```
|
||||
|
||||
```yaml
|
||||
- name: Install a pep440-specifier-satisfying version of uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
version: ">=0.4.25,<0.5"
|
||||
```
|
||||
|
||||
### Install a version defined in a requirements or config file
|
||||
|
||||
You can use the `version-file` input to specify a file that contains the version of uv to install.
|
||||
This can either be a `pyproject.toml` or `uv.toml` file which defines a `required-version` or
|
||||
uv defined as a dependency in `pyproject.toml` or `requirements.txt`.
|
||||
|
||||
```yaml
|
||||
- name: Install uv based on the version defined in pyproject.toml
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
version-file: "pyproject.toml"
|
||||
```
|
||||
- `uv-version`: The installed uv version. Useful when using latest.
|
||||
- `uv-path`: The path to the installed uv binary.
|
||||
- `uvx-path`: The path to the installed uvx binary.
|
||||
- `cache-hit`: A boolean value to indicate a cache entry was found.
|
||||
- `venv`: Path to the activated venv if activate-environment is true.
|
||||
- `python-version`: The Python version that was set.
|
||||
- `python-cache-hit`: A boolean value to indicate the Python cache entry was found.
|
||||
|
||||
### Python version
|
||||
|
||||
@@ -115,13 +139,13 @@ This will override any python version specifications in `pyproject.toml` and `.p
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv and set the python version to 3.13t
|
||||
uses: astral-sh/setup-uv@v6
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
python-version: 3.13t
|
||||
- run: uv pip install --python=3.13t pip
|
||||
```
|
||||
|
||||
You can combine this with a matrix to test multiple python versions:
|
||||
You can combine this with a matrix to test multiple Python versions:
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
@@ -129,353 +153,49 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.9", "3.10", "3.11", "3.12"]
|
||||
python-version: ["3.10", "3.11", "3.12", "3.13"]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
- name: Install the latest version of uv and set the python version
|
||||
uses: astral-sh/setup-uv@v6
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Test with python ${{ matrix.python-version }}
|
||||
run: uv run --frozen pytest
|
||||
```
|
||||
|
||||
### Activate environment
|
||||
|
||||
You can set `activate-environment` to `true` to automatically activate a venv.
|
||||
This allows directly using it in later steps:
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv and activate the environment
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
activate-environment: true
|
||||
- run: uv pip install pip
|
||||
```
|
||||
|
||||
> [!WARNING]
|
||||
>
|
||||
> Activating the environment adds your dependencies to the `PATH`, which could break some workflows.
|
||||
> For example, if you have a dependency which requires uv, e.g., `hatch`, activating the
|
||||
> environment will shadow the `uv` binary installed by this action and may result in a different uv
|
||||
> version being used.
|
||||
>
|
||||
> We do not recommend using this setting for most use-cases. Instead, use `uv run` to execute
|
||||
> commands in the environment.
|
||||
|
||||
### Working directory
|
||||
|
||||
You can set the working directory with the `working-directory` input.
|
||||
This controls where we look for `pyproject.toml`, `uv.toml` and `.python-version` files
|
||||
which are used to determine the version of uv and python to install.
|
||||
|
||||
It also controls where [the venv gets created](#activate-environment).
|
||||
It also controls where [the venv gets created](#activate-environment), unless `venv-path` is set.
|
||||
|
||||
```yaml
|
||||
- name: Install uv based on the config files in the working-directory
|
||||
uses: astral-sh/setup-uv@v6
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
working-directory: my/subproject/dir
|
||||
```
|
||||
|
||||
### Validate checksum
|
||||
## Advanced Configuration
|
||||
|
||||
You can specify a checksum to validate the downloaded executable. Checksums up to the default version
|
||||
are automatically verified by this action. The sha256 hashes can be found on the
|
||||
[releases page](https://github.com/astral-sh/uv/releases) of the uv repo.
|
||||
For more advanced configuration options, see our detailed documentation:
|
||||
|
||||
```yaml
|
||||
- name: Install a specific version and validate the checksum
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
version: "0.3.1"
|
||||
checksum: "e11b01402ab645392c7ad6044db63d37e4fd1e745e015306993b07695ea5f9f8"
|
||||
```
|
||||
|
||||
### Enable caching
|
||||
|
||||
> [!NOTE]
|
||||
> The cache is pruned before it is uploaded to the GitHub Actions cache. This can lead to
|
||||
> a small or empty cache. See [Disable cache pruning](#disable-cache-pruning) for more details.
|
||||
|
||||
If you enable caching, the [uv cache](https://docs.astral.sh/uv/concepts/cache/) will be uploaded to
|
||||
the GitHub Actions cache. This can speed up runs that reuse the cache by several minutes.
|
||||
Caching is enabled by default on GitHub-hosted runners.
|
||||
|
||||
> [!TIP]
|
||||
>
|
||||
> On self-hosted runners this is usually not needed since the cache generated by uv on the runner's
|
||||
> filesystem is not removed after a run. For more details see [Local cache path](#local-cache-path).
|
||||
|
||||
You can optionally define a custom cache key suffix.
|
||||
|
||||
```yaml
|
||||
- name: Enable caching and define a custom cache key suffix
|
||||
id: setup-uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
enable-cache: true
|
||||
cache-suffix: "optional-suffix"
|
||||
```
|
||||
|
||||
When the cache was successfully restored, the output `cache-hit` will be set to `true` and you can
|
||||
use it in subsequent steps. For example, to use the cache in the above case:
|
||||
|
||||
```yaml
|
||||
- name: Do something if the cache was restored
|
||||
if: steps.setup-uv.outputs.cache-hit == 'true'
|
||||
run: echo "Cache was restored"
|
||||
```
|
||||
|
||||
#### Cache dependency glob
|
||||
|
||||
If you want to control when the GitHub Actions cache is invalidated, specify a glob pattern with the
|
||||
`cache-dependency-glob` input. The GitHub Actions cache will be invalidated if any file matching the glob pattern
|
||||
changes. If you use relative paths, they are relative to the repository root.
|
||||
|
||||
> [!NOTE]
|
||||
>
|
||||
> You can look up supported patterns [here](https://github.com/actions/toolkit/tree/main/packages/glob#patterns)
|
||||
>
|
||||
> The default is
|
||||
> ```yaml
|
||||
> cache-dependency-glob: |
|
||||
> **/*requirements*.txt
|
||||
> **/*requirements*.in
|
||||
> **/*constraints*.txt
|
||||
> **/*constraints*.in
|
||||
> **/pyproject.toml
|
||||
> **/uv.lock
|
||||
> ```
|
||||
|
||||
```yaml
|
||||
- name: Define a cache dependency glob
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
enable-cache: true
|
||||
cache-dependency-glob: "**/pyproject.toml"
|
||||
```
|
||||
|
||||
```yaml
|
||||
- name: Define a list of cache dependency globs
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
enable-cache: true
|
||||
cache-dependency-glob: |
|
||||
**/requirements*.txt
|
||||
**/pyproject.toml
|
||||
```
|
||||
|
||||
```yaml
|
||||
- name: Define an absolute cache dependency glob
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
enable-cache: true
|
||||
cache-dependency-glob: "/tmp/my-folder/requirements*.txt"
|
||||
```
|
||||
|
||||
```yaml
|
||||
- name: Never invalidate the cache
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
enable-cache: true
|
||||
cache-dependency-glob: ""
|
||||
```
|
||||
|
||||
### Local cache path
|
||||
|
||||
This action controls where uv stores its cache on the runner's filesystem by setting `UV_CACHE_DIR`.
|
||||
It defaults to `setup-uv-cache` in the `TMP` dir, `D:\a\_temp\setup-uv-cache` on Windows and
|
||||
`/tmp/setup-uv-cache` on Linux/macOS. You can change the default by specifying the path with the
|
||||
`cache-local-path` input.
|
||||
|
||||
```yaml
|
||||
- name: Define a custom uv cache path
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
cache-local-path: "/path/to/cache"
|
||||
```
|
||||
|
||||
### Disable cache pruning
|
||||
|
||||
By default, the uv cache is pruned after every run, removing pre-built wheels, but retaining any
|
||||
wheels that were built from source. On GitHub-hosted runners, it's typically faster to omit those
|
||||
pre-built wheels from the cache (and instead re-download them from the registry on each run).
|
||||
However, on self-hosted or local runners, preserving the cache may be more efficient. See
|
||||
the [documentation](https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration) for
|
||||
more information.
|
||||
|
||||
If you want to persist the entire cache across runs, disable cache pruning with the `prune-cache`
|
||||
input.
|
||||
|
||||
```yaml
|
||||
- name: Don't prune the cache before saving it
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
enable-cache: true
|
||||
prune-cache: false
|
||||
```
|
||||
|
||||
### Ignore nothing to cache
|
||||
|
||||
By default, the action will fail if caching is enabled but there is nothing to upload (the uv cache directory does not exist).
|
||||
If you want to ignore this, set the `ignore-nothing-to-cache` input to `true`.
|
||||
|
||||
```yaml
|
||||
- name: Ignore nothing to cache
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
enable-cache: true
|
||||
ignore-nothing-to-cache: true
|
||||
```
|
||||
|
||||
### Ignore empty workdir
|
||||
|
||||
By default, the action will warn if the workdir is empty, because this is usually the case when
|
||||
`actions/checkout` is configured to run after `setup-uv`, which is not supported.
|
||||
|
||||
If you want to ignore this, set the `ignore-empty-workdir` input to `true`.
|
||||
|
||||
```yaml
|
||||
- name: Ignore empty workdir
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
ignore-empty-workdir: true
|
||||
```
|
||||
|
||||
### GitHub authentication token
|
||||
|
||||
This action uses the GitHub API to fetch the uv release artifacts. To avoid hitting the GitHub API
|
||||
rate limit too quickly, an authentication token can be provided via the `github-token` input. By
|
||||
default, the `GITHUB_TOKEN` secret is used, which is automatically provided by GitHub Actions.
|
||||
|
||||
If the default
|
||||
[permissions for the GitHub token](https://docs.github.com/en/actions/security-for-github-actions/security-guides/automatic-token-authentication#permissions-for-the-github_token)
|
||||
are not sufficient, you can provide a custom GitHub token with the necessary permissions.
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv with a custom GitHub token
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
github-token: ${{ secrets.CUSTOM_GITHUB_TOKEN }}
|
||||
```
|
||||
|
||||
### UV_TOOL_DIR
|
||||
|
||||
On Windows `UV_TOOL_DIR` is set to `uv-tool-dir` in the `TMP` dir (e.g. `D:\a\_temp\uv-tool-dir`).
|
||||
On GitHub hosted runners this is on the much faster `D:` drive.
|
||||
|
||||
On all other platforms the tool environments are placed in the
|
||||
[default location](https://docs.astral.sh/uv/concepts/tools/#tools-directory).
|
||||
|
||||
If you want to change this behaviour (especially on self-hosted runners) you can use the `tool-dir`
|
||||
input:
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv with a custom tool dir
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
tool-dir: "/path/to/tool/dir"
|
||||
```
|
||||
|
||||
### UV_TOOL_BIN_DIR
|
||||
|
||||
On Windows `UV_TOOL_BIN_DIR` is set to `uv-tool-bin-dir` in the `TMP` dir (e.g.
|
||||
`D:\a\_temp\uv-tool-bin-dir`). On GitHub hosted runners this is on the much faster `D:` drive. This
|
||||
path is also automatically added to the PATH.
|
||||
|
||||
On all other platforms the tool binaries get installed to the
|
||||
[default location](https://docs.astral.sh/uv/concepts/tools/#the-bin-directory).
|
||||
|
||||
If you want to change this behaviour (especially on self-hosted runners) you can use the
|
||||
`tool-bin-dir` input:
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv with a custom tool bin dir
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
tool-bin-dir: "/path/to/tool-bin/dir"
|
||||
```
|
||||
|
||||
### Tilde Expansion
|
||||
|
||||
This action supports expanding the `~` character to the user's home directory for the following inputs:
|
||||
|
||||
- `version-file`
|
||||
- `cache-local-path`
|
||||
- `tool-dir`
|
||||
- `tool-bin-dir`
|
||||
- `cache-dependency-glob`
|
||||
|
||||
```yaml
|
||||
- name: Expand the tilde character
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
cache-local-path: "~/path/to/cache"
|
||||
tool-dir: "~/path/to/tool/dir"
|
||||
tool-bin-dir: "~/path/to/tool-bin/dir"
|
||||
cache-dependency-glob: "~/my-cache-buster"
|
||||
```
|
||||
|
||||
### Manifest file
|
||||
|
||||
The `manifest-file` input allows you to specify a JSON manifest that lists available uv versions,
|
||||
architectures, and their download URLs. By default, this action uses the manifest file contained
|
||||
in this repository, which is automatically updated with each release of uv.
|
||||
|
||||
The manifest file contains an array of objects, each describing a version,
|
||||
architecture, platform, and the corresponding download URL. For example:
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"version": "0.7.13",
|
||||
"artifactName": "uv-aarch64-apple-darwin.tar.gz",
|
||||
"arch": "aarch64",
|
||||
"platform": "apple-darwin",
|
||||
"downloadUrl": "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-apple-darwin.tar.gz"
|
||||
},
|
||||
...
|
||||
]
|
||||
```
|
||||
|
||||
You can supply a custom manifest file URL to define additional versions,
|
||||
architectures, or different download URLs.
|
||||
This is useful if you maintain your own uv builds or want to override the default sources.
|
||||
|
||||
```yaml
|
||||
- name: Use a custom manifest file
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
manifest-file: "https://example.com/my-custom-manifest.json"
|
||||
```
|
||||
|
||||
> [!NOTE]
|
||||
> When you use a custom manifest file and do not set the `version` input, its default value is `latest`.
|
||||
> This means the action will install the latest version available in the custom manifest file.
|
||||
> This is different from the default behavior of installing the latest version from the official uv releases.
|
||||
|
||||
### Add problem matchers
|
||||
|
||||
This action automatically adds
|
||||
[problem matchers](https://github.com/actions/toolkit/blob/main/docs/problem-matchers.md)
|
||||
for python errors.
|
||||
|
||||
You can disable this by setting the `add-problem-matchers` input to `false`.
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv without problem matchers
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
add-problem-matchers: false
|
||||
```
|
||||
- **[Advanced Version Configuration](docs/advanced-version-configuration.md)** - Resolution strategies and version files
|
||||
- **[Caching](docs/caching.md)** - Complete guide to caching configuration
|
||||
- **[Environment and Tools](docs/environment-and-tools.md)** - Environment activation, tool directories, authentication, and environment variables
|
||||
- **[Customization](docs/customization.md)** - Checksum validation, custom manifests, and problem matchers
|
||||
|
||||
## How it works
|
||||
|
||||
This action downloads uv from the uv repo's official
|
||||
[GitHub Releases](https://github.com/astral-sh/uv) and uses the
|
||||
[GitHub Actions Toolkit](https://github.com/actions/toolkit) to cache it as a tool to speed up
|
||||
consecutive runs on self-hosted runners.
|
||||
By default, this action resolves uv versions from
|
||||
[`astral-sh/versions`](https://github.com/astral-sh/versions) (NDJSON) and downloads uv from the
|
||||
official [GitHub Releases](https://github.com/astral-sh/uv).
|
||||
|
||||
It then uses the [GitHub Actions Toolkit](https://github.com/actions/toolkit) to cache uv as a
|
||||
tool to speed up consecutive runs on self-hosted runners.
|
||||
|
||||
The installed version of uv is then added to the runner PATH, enabling later steps to invoke it
|
||||
by name (`uv`).
|
||||
@@ -487,7 +207,8 @@ by name (`uv`).
|
||||
With `setup-uv`, you can install a specific version of Python using `uv python install` rather than
|
||||
relying on `actions/setup-python`.
|
||||
|
||||
Using `actions/setup-python` can be faster, because GitHub caches the Python versions alongside the runner.
|
||||
Using `actions/setup-python` can be faster (~1s), because GitHub includes several Python versions in the runner image
|
||||
which are available to get activated by `actions/setup-python` without having to download them.
|
||||
|
||||
For example:
|
||||
|
||||
@@ -495,7 +216,7 @@ For example:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@main
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
enable-cache: true
|
||||
- name: Test
|
||||
@@ -507,7 +228,7 @@ To install a specific version of Python, use
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
enable-cache: true
|
||||
- name: Install Python 3.12
|
||||
@@ -526,7 +247,7 @@ output:
|
||||
uses: actions/checkout@main
|
||||
- name: Install the default version of uv
|
||||
id: setup-uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
uses: astral-sh/setup-uv@v7
|
||||
- name: Print the installed version
|
||||
run: echo "Installed uv version is ${{ steps.setup-uv.outputs.uv-version }}"
|
||||
```
|
||||
@@ -535,14 +256,13 @@ output:
|
||||
|
||||
**Yes!**
|
||||
|
||||
The cache key gets computed by using the [cache-dependency-glob](#cache-dependency-glob).
|
||||
The cache key gets computed by using the cache-dependency-glob (see [Caching documentation](docs/caching.md)).
|
||||
|
||||
If you
|
||||
have jobs which use the same dependency definitions from `requirements.txt` or
|
||||
If you have jobs which use the same dependency definitions from `requirements.txt` or
|
||||
`pyproject.toml` but different
|
||||
[resolution strategies](https://docs.astral.sh/uv/concepts/resolution/#resolution-strategy),
|
||||
each job will have different dependencies or dependency versions.
|
||||
But if you do not add the resolution strategy as a [cache-suffix](#enable-caching),
|
||||
But if you do not add the resolution strategy as a cache-suffix (see [Caching documentation](docs/caching.md)),
|
||||
they will have the same cache key.
|
||||
|
||||
This means the first job which starts uploading its cache will win and all other jobs will fail
|
||||
@@ -555,15 +275,15 @@ You might see errors like
|
||||
### Why do I see warnings like `No GitHub Actions cache found for key`
|
||||
|
||||
When a workflow runs for the first time on a branch and has a new cache key, because the
|
||||
[cache-dependency-glob](#cache-dependency-glob) found changed files (changed dependencies),
|
||||
cache-dependency-glob (see [Caching documentation](docs/caching.md)) found changed files (changed dependencies),
|
||||
the cache will not be found and the warning `No GitHub Actions cache found for key` will be printed.
|
||||
|
||||
While this might be irritating at first, it is expected behaviour and the cache will be created
|
||||
and reused in later workflows.
|
||||
|
||||
The reason for the warning is, that we have no way to know if this is the first run of a new
|
||||
cache key or the user accidentally misconfigured the [cache-dependency-glob](#cache-dependency-glob)
|
||||
or [cache-suffix](#enable-caching) and the cache never gets used.
|
||||
The reason for the warning is that we have no way to know if this is the first run of a new
|
||||
cache key or the user accidentally misconfigured the cache-dependency-glob
|
||||
or cache-suffix (see [Caching documentation](docs/caching.md)) and the cache never gets used.
|
||||
|
||||
### Do I have to run `actions/checkout` before or after `setup-uv`?
|
||||
|
||||
@@ -574,7 +294,7 @@ Running `actions/checkout` after `setup-uv` **is not supported**.
|
||||
|
||||
### Does `setup-uv` also install my project or its dependencies automatically?
|
||||
|
||||
No, `setup-uv` alone wont install any libraries from your `pyproject.toml` or `requirements.txt`, it only sets up `uv`.
|
||||
No, `setup-uv` alone won't install any libraries from your `pyproject.toml` or `requirements.txt`, it only sets up `uv`.
|
||||
You should run `uv sync` or `uv pip install .` separately, or use `uv run ...` to ensure necessary dependencies are installed.
|
||||
|
||||
### Why is a changed cache not detected and not the full cache uploaded?
|
||||
@@ -588,11 +308,11 @@ if an uploaded cache exists for this key.
|
||||
If yes (e.g. contents of `uv.lock` did not change since last run) the dependencies in the cache
|
||||
are up to date and the cache will be downloaded and used.
|
||||
|
||||
Details on determining which files will lead to different caches can be read under
|
||||
[cache-dependency-glob](#cache-dependency-glob)
|
||||
Details on determining which files will lead to different caches can be read in the
|
||||
[Caching documentation](docs/caching.md).
|
||||
|
||||
Some dependencies will never be uploaded to the cache and will be downloaded again on each run
|
||||
as described in [disable-cache-pruning](#disable-cache-pruning)
|
||||
as described in the [Caching documentation](docs/caching.md).
|
||||
|
||||
## Acknowledgements
|
||||
|
||||
|
||||
@@ -4,10 +4,11 @@ import {
|
||||
validateChecksum,
|
||||
} from "../../../src/download/checksum/checksum";
|
||||
|
||||
const validChecksum =
|
||||
"f3da96ec7e995debee7f5d52ecd034dfb7074309a1da42f76429ecb814d813a3";
|
||||
const filePath = "__tests__/fixtures/checksumfile";
|
||||
|
||||
test("checksum should match", async () => {
|
||||
const validChecksum =
|
||||
"f3da96ec7e995debee7f5d52ecd034dfb7074309a1da42f76429ecb814d813a3";
|
||||
const filePath = "__tests__/fixtures/checksumfile";
|
||||
// string params don't matter only test the checksum mechanism, not known checksums
|
||||
await validateChecksum(
|
||||
validChecksum,
|
||||
@@ -18,6 +19,16 @@ test("checksum should match", async () => {
|
||||
);
|
||||
});
|
||||
|
||||
test("provided checksum beats known checksums", async () => {
|
||||
await validateChecksum(
|
||||
validChecksum,
|
||||
filePath,
|
||||
"x86_64",
|
||||
"unknown-linux-gnu",
|
||||
"0.3.0",
|
||||
);
|
||||
});
|
||||
|
||||
type KnownVersionFixture = { version: string; known: boolean };
|
||||
|
||||
it.each<KnownVersionFixture>([
|
||||
@@ -29,9 +40,9 @@ it.each<KnownVersionFixture>([
|
||||
known: false,
|
||||
version: "0.0.15",
|
||||
},
|
||||
])(
|
||||
"isknownVersion should return $known for version $version",
|
||||
({ version, known }) => {
|
||||
expect(isknownVersion(version)).toBe(known);
|
||||
},
|
||||
);
|
||||
])("isknownVersion should return $known for version $version", ({
|
||||
version,
|
||||
known,
|
||||
}) => {
|
||||
expect(isknownVersion(version)).toBe(known);
|
||||
});
|
||||
|
||||
256
__tests__/download/download-version.test.ts
Normal file
256
__tests__/download/download-version.test.ts
Normal file
@@ -0,0 +1,256 @@
|
||||
import { beforeEach, describe, expect, it, jest } from "@jest/globals";
|
||||
|
||||
const mockInfo = jest.fn();
|
||||
const mockWarning = jest.fn();
|
||||
|
||||
jest.mock("@actions/core", () => ({
|
||||
debug: jest.fn(),
|
||||
info: mockInfo,
|
||||
warning: mockWarning,
|
||||
}));
|
||||
|
||||
// biome-ignore lint/suspicious/noExplicitAny: Mock requires flexible typing in tests.
|
||||
const mockDownloadTool = jest.fn<any>();
|
||||
// biome-ignore lint/suspicious/noExplicitAny: Mock requires flexible typing in tests.
|
||||
const mockExtractTar = jest.fn<any>();
|
||||
// biome-ignore lint/suspicious/noExplicitAny: Mock requires flexible typing in tests.
|
||||
const mockExtractZip = jest.fn<any>();
|
||||
// biome-ignore lint/suspicious/noExplicitAny: Mock requires flexible typing in tests.
|
||||
const mockCacheDir = jest.fn<any>();
|
||||
|
||||
jest.mock("@actions/tool-cache", () => {
|
||||
const actual = jest.requireActual("@actions/tool-cache") as Record<
|
||||
string,
|
||||
unknown
|
||||
>;
|
||||
|
||||
return {
|
||||
...actual,
|
||||
cacheDir: mockCacheDir,
|
||||
downloadTool: mockDownloadTool,
|
||||
extractTar: mockExtractTar,
|
||||
extractZip: mockExtractZip,
|
||||
};
|
||||
});
|
||||
|
||||
// biome-ignore lint/suspicious/noExplicitAny: Mock requires flexible typing in tests.
|
||||
const mockGetLatestVersionFromNdjson = jest.fn<any>();
|
||||
// biome-ignore lint/suspicious/noExplicitAny: Mock requires flexible typing in tests.
|
||||
const mockGetAllVersionsFromNdjson = jest.fn<any>();
|
||||
// biome-ignore lint/suspicious/noExplicitAny: Mock requires flexible typing in tests.
|
||||
const mockGetArtifactFromNdjson = jest.fn<any>();
|
||||
|
||||
jest.mock("../../src/download/versions-client", () => ({
|
||||
getAllVersions: mockGetAllVersionsFromNdjson,
|
||||
getArtifact: mockGetArtifactFromNdjson,
|
||||
getLatestVersion: mockGetLatestVersionFromNdjson,
|
||||
}));
|
||||
|
||||
// biome-ignore lint/suspicious/noExplicitAny: Mock requires flexible typing in tests.
|
||||
const mockGetAllManifestVersions = jest.fn<any>();
|
||||
// biome-ignore lint/suspicious/noExplicitAny: Mock requires flexible typing in tests.
|
||||
const mockGetLatestVersionInManifest = jest.fn<any>();
|
||||
// biome-ignore lint/suspicious/noExplicitAny: Mock requires flexible typing in tests.
|
||||
const mockGetManifestArtifact = jest.fn<any>();
|
||||
|
||||
jest.mock("../../src/download/version-manifest", () => ({
|
||||
getAllVersions: mockGetAllManifestVersions,
|
||||
getLatestKnownVersion: mockGetLatestVersionInManifest,
|
||||
getManifestArtifact: mockGetManifestArtifact,
|
||||
}));
|
||||
|
||||
// biome-ignore lint/suspicious/noExplicitAny: Mock requires flexible typing in tests.
|
||||
const mockValidateChecksum = jest.fn<any>();
|
||||
|
||||
jest.mock("../../src/download/checksum/checksum", () => ({
|
||||
validateChecksum: mockValidateChecksum,
|
||||
}));
|
||||
|
||||
import {
|
||||
downloadVersionFromManifest,
|
||||
downloadVersionFromNdjson,
|
||||
resolveVersion,
|
||||
} from "../../src/download/download-version";
|
||||
|
||||
describe("download-version", () => {
|
||||
beforeEach(() => {
|
||||
mockInfo.mockReset();
|
||||
mockWarning.mockReset();
|
||||
mockDownloadTool.mockReset();
|
||||
mockExtractTar.mockReset();
|
||||
mockExtractZip.mockReset();
|
||||
mockCacheDir.mockReset();
|
||||
mockGetLatestVersionFromNdjson.mockReset();
|
||||
mockGetAllVersionsFromNdjson.mockReset();
|
||||
mockGetArtifactFromNdjson.mockReset();
|
||||
mockGetAllManifestVersions.mockReset();
|
||||
mockGetLatestVersionInManifest.mockReset();
|
||||
mockGetManifestArtifact.mockReset();
|
||||
mockValidateChecksum.mockReset();
|
||||
|
||||
mockDownloadTool.mockResolvedValue("/tmp/downloaded");
|
||||
mockExtractTar.mockResolvedValue("/tmp/extracted");
|
||||
mockExtractZip.mockResolvedValue("/tmp/extracted");
|
||||
mockCacheDir.mockResolvedValue("/tmp/cached");
|
||||
});
|
||||
|
||||
describe("resolveVersion", () => {
|
||||
it("uses astral-sh/versions to resolve latest", async () => {
|
||||
mockGetLatestVersionFromNdjson.mockResolvedValue("0.9.26");
|
||||
|
||||
const version = await resolveVersion("latest", undefined);
|
||||
|
||||
expect(version).toBe("0.9.26");
|
||||
expect(mockGetLatestVersionFromNdjson).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("uses astral-sh/versions to resolve available versions", async () => {
|
||||
mockGetAllVersionsFromNdjson.mockResolvedValue(["0.9.26", "0.9.25"]);
|
||||
|
||||
const version = await resolveVersion("^0.9.0", undefined);
|
||||
|
||||
expect(version).toBe("0.9.26");
|
||||
expect(mockGetAllVersionsFromNdjson).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("does not fall back when astral-sh/versions fails", async () => {
|
||||
mockGetLatestVersionFromNdjson.mockRejectedValue(
|
||||
new Error("NDJSON unavailable"),
|
||||
);
|
||||
|
||||
await expect(resolveVersion("latest", undefined)).rejects.toThrow(
|
||||
"NDJSON unavailable",
|
||||
);
|
||||
});
|
||||
|
||||
it("uses manifest-file when provided", async () => {
|
||||
mockGetAllManifestVersions.mockResolvedValue(["0.9.26", "0.9.25"]);
|
||||
|
||||
const version = await resolveVersion(
|
||||
"^0.9.0",
|
||||
"https://example.com/custom.ndjson",
|
||||
);
|
||||
|
||||
expect(version).toBe("0.9.26");
|
||||
expect(mockGetAllManifestVersions).toHaveBeenCalledWith(
|
||||
"https://example.com/custom.ndjson",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("downloadVersionFromNdjson", () => {
|
||||
it("fails when NDJSON metadata lookup fails", async () => {
|
||||
mockGetArtifactFromNdjson.mockRejectedValue(
|
||||
new Error("NDJSON unavailable"),
|
||||
);
|
||||
|
||||
await expect(
|
||||
downloadVersionFromNdjson(
|
||||
"unknown-linux-gnu",
|
||||
"x86_64",
|
||||
"0.9.26",
|
||||
undefined,
|
||||
"token",
|
||||
),
|
||||
).rejects.toThrow("NDJSON unavailable");
|
||||
|
||||
expect(mockDownloadTool).not.toHaveBeenCalled();
|
||||
expect(mockValidateChecksum).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("fails when no matching artifact exists in NDJSON metadata", async () => {
|
||||
mockGetArtifactFromNdjson.mockResolvedValue(undefined);
|
||||
|
||||
await expect(
|
||||
downloadVersionFromNdjson(
|
||||
"unknown-linux-gnu",
|
||||
"x86_64",
|
||||
"0.9.26",
|
||||
undefined,
|
||||
"token",
|
||||
),
|
||||
).rejects.toThrow(
|
||||
"Could not find artifact for version 0.9.26, arch x86_64, platform unknown-linux-gnu in https://raw.githubusercontent.com/astral-sh/versions/main/v1/uv.ndjson .",
|
||||
);
|
||||
|
||||
expect(mockDownloadTool).not.toHaveBeenCalled();
|
||||
expect(mockValidateChecksum).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("uses built-in checksums for default NDJSON downloads", async () => {
|
||||
mockGetArtifactFromNdjson.mockResolvedValue({
|
||||
archiveFormat: "tar.gz",
|
||||
sha256: "ndjson-checksum-that-should-be-ignored",
|
||||
url: "https://example.com/uv.tar.gz",
|
||||
});
|
||||
|
||||
await downloadVersionFromNdjson(
|
||||
"unknown-linux-gnu",
|
||||
"x86_64",
|
||||
"0.9.26",
|
||||
undefined,
|
||||
"token",
|
||||
);
|
||||
|
||||
expect(mockValidateChecksum).toHaveBeenCalledWith(
|
||||
undefined,
|
||||
"/tmp/downloaded",
|
||||
"x86_64",
|
||||
"unknown-linux-gnu",
|
||||
"0.9.26",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("downloadVersionFromManifest", () => {
|
||||
it("uses manifest-file checksum metadata when checksum input is unset", async () => {
|
||||
mockGetManifestArtifact.mockResolvedValue({
|
||||
archiveFormat: "tar.gz",
|
||||
checksum: "manifest-checksum",
|
||||
downloadUrl: "https://example.com/custom-uv.tar.gz",
|
||||
});
|
||||
|
||||
await downloadVersionFromManifest(
|
||||
"https://example.com/custom.ndjson",
|
||||
"unknown-linux-gnu",
|
||||
"x86_64",
|
||||
"0.9.26",
|
||||
"",
|
||||
"token",
|
||||
);
|
||||
|
||||
expect(mockValidateChecksum).toHaveBeenCalledWith(
|
||||
"manifest-checksum",
|
||||
"/tmp/downloaded",
|
||||
"x86_64",
|
||||
"unknown-linux-gnu",
|
||||
"0.9.26",
|
||||
);
|
||||
});
|
||||
|
||||
it("prefers checksum input over manifest-file checksum metadata", async () => {
|
||||
mockGetManifestArtifact.mockResolvedValue({
|
||||
archiveFormat: "tar.gz",
|
||||
checksum: "manifest-checksum",
|
||||
downloadUrl: "https://example.com/custom-uv.tar.gz",
|
||||
});
|
||||
|
||||
await downloadVersionFromManifest(
|
||||
"https://example.com/custom.ndjson",
|
||||
"unknown-linux-gnu",
|
||||
"x86_64",
|
||||
"0.9.26",
|
||||
"user-checksum",
|
||||
"token",
|
||||
);
|
||||
|
||||
expect(mockValidateChecksum).toHaveBeenCalledWith(
|
||||
"user-checksum",
|
||||
"/tmp/downloaded",
|
||||
"x86_64",
|
||||
"unknown-linux-gnu",
|
||||
"0.9.26",
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
136
__tests__/download/version-manifest.test.ts
Normal file
136
__tests__/download/version-manifest.test.ts
Normal file
@@ -0,0 +1,136 @@
|
||||
import { beforeEach, describe, expect, it, jest } from "@jest/globals";
|
||||
|
||||
const mockWarning = jest.fn();
|
||||
|
||||
jest.mock("@actions/core", () => ({
|
||||
debug: jest.fn(),
|
||||
info: jest.fn(),
|
||||
warning: mockWarning,
|
||||
}));
|
||||
|
||||
// biome-ignore lint/suspicious/noExplicitAny: Mock requires flexible typing in tests.
|
||||
const mockFetch = jest.fn<any>();
|
||||
jest.mock("../../src/utils/fetch", () => ({
|
||||
fetch: mockFetch,
|
||||
}));
|
||||
|
||||
import {
|
||||
clearManifestCache,
|
||||
getAllVersions,
|
||||
getLatestKnownVersion,
|
||||
getManifestArtifact,
|
||||
} from "../../src/download/version-manifest";
|
||||
|
||||
const legacyManifestResponse = JSON.stringify([
|
||||
{
|
||||
arch: "x86_64",
|
||||
artifactName: "uv-x86_64-unknown-linux-gnu.tar.gz",
|
||||
downloadUrl:
|
||||
"https://example.com/releases/download/0.7.12-alpha.1/uv-x86_64-unknown-linux-gnu.tar.gz",
|
||||
platform: "unknown-linux-gnu",
|
||||
version: "0.7.12-alpha.1",
|
||||
},
|
||||
{
|
||||
arch: "x86_64",
|
||||
artifactName: "uv-x86_64-unknown-linux-gnu.tar.gz",
|
||||
downloadUrl:
|
||||
"https://example.com/releases/download/0.7.13/uv-x86_64-unknown-linux-gnu.tar.gz",
|
||||
platform: "unknown-linux-gnu",
|
||||
version: "0.7.13",
|
||||
},
|
||||
]);
|
||||
|
||||
const ndjsonManifestResponse = `{"version":"0.10.0","artifacts":[{"platform":"x86_64-unknown-linux-gnu","variant":"default","url":"https://example.com/releases/download/0.10.0/uv-x86_64-unknown-linux-gnu.tar.gz","archive_format":"tar.gz","sha256":"checksum-100"}]}
|
||||
{"version":"0.9.30","artifacts":[{"platform":"x86_64-unknown-linux-gnu","variant":"default","url":"https://example.com/releases/download/0.9.30/uv-x86_64-unknown-linux-gnu.tar.gz","archive_format":"tar.gz","sha256":"checksum-0930"}]}`;
|
||||
|
||||
const multiVariantManifestResponse = `{"version":"0.10.0","artifacts":[{"platform":"x86_64-unknown-linux-gnu","variant":"managed-python","url":"https://example.com/releases/download/0.10.0/uv-x86_64-unknown-linux-gnu-managed-python.tar.gz","archive_format":"tar.gz","sha256":"checksum-managed"},{"platform":"x86_64-unknown-linux-gnu","variant":"default","url":"https://example.com/releases/download/0.10.0/uv-x86_64-unknown-linux-gnu-default.zip","archive_format":"zip","sha256":"checksum-default"}]}`;
|
||||
|
||||
function createMockResponse(
|
||||
ok: boolean,
|
||||
status: number,
|
||||
statusText: string,
|
||||
data: string,
|
||||
) {
|
||||
return {
|
||||
ok,
|
||||
status,
|
||||
statusText,
|
||||
text: async () => data,
|
||||
};
|
||||
}
|
||||
|
||||
describe("version-manifest", () => {
|
||||
beforeEach(() => {
|
||||
clearManifestCache();
|
||||
mockFetch.mockReset();
|
||||
mockWarning.mockReset();
|
||||
});
|
||||
|
||||
it("supports the legacy JSON manifest format", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(true, 200, "OK", legacyManifestResponse),
|
||||
);
|
||||
|
||||
const latest = await getLatestKnownVersion(
|
||||
"https://example.com/legacy.json",
|
||||
);
|
||||
const artifact = await getManifestArtifact(
|
||||
"https://example.com/legacy.json",
|
||||
"0.7.13",
|
||||
"x86_64",
|
||||
"unknown-linux-gnu",
|
||||
);
|
||||
|
||||
expect(latest).toBe("0.7.13");
|
||||
expect(artifact).toEqual({
|
||||
archiveFormat: undefined,
|
||||
checksum: undefined,
|
||||
downloadUrl:
|
||||
"https://example.com/releases/download/0.7.13/uv-x86_64-unknown-linux-gnu.tar.gz",
|
||||
});
|
||||
expect(mockWarning).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("supports NDJSON manifests", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(true, 200, "OK", ndjsonManifestResponse),
|
||||
);
|
||||
|
||||
const versions = await getAllVersions("https://example.com/custom.ndjson");
|
||||
const artifact = await getManifestArtifact(
|
||||
"https://example.com/custom.ndjson",
|
||||
"0.10.0",
|
||||
"x86_64",
|
||||
"unknown-linux-gnu",
|
||||
);
|
||||
|
||||
expect(versions).toEqual(["0.10.0", "0.9.30"]);
|
||||
expect(artifact).toEqual({
|
||||
archiveFormat: "tar.gz",
|
||||
checksum: "checksum-100",
|
||||
downloadUrl:
|
||||
"https://example.com/releases/download/0.10.0/uv-x86_64-unknown-linux-gnu.tar.gz",
|
||||
});
|
||||
expect(mockWarning).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("prefers the default variant when a manifest contains multiple variants", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(true, 200, "OK", multiVariantManifestResponse),
|
||||
);
|
||||
|
||||
const artifact = await getManifestArtifact(
|
||||
"https://example.com/multi-variant.ndjson",
|
||||
"0.10.0",
|
||||
"x86_64",
|
||||
"unknown-linux-gnu",
|
||||
);
|
||||
|
||||
expect(artifact).toEqual({
|
||||
archiveFormat: "zip",
|
||||
checksum: "checksum-default",
|
||||
downloadUrl:
|
||||
"https://example.com/releases/download/0.10.0/uv-x86_64-unknown-linux-gnu-default.zip",
|
||||
});
|
||||
});
|
||||
});
|
||||
169
__tests__/download/versions-client.test.ts
Normal file
169
__tests__/download/versions-client.test.ts
Normal file
@@ -0,0 +1,169 @@
|
||||
import { beforeEach, describe, expect, it, jest } from "@jest/globals";
|
||||
|
||||
// biome-ignore lint/suspicious/noExplicitAny: Mock requires flexible typing in tests.
|
||||
const mockFetch = jest.fn<any>();
|
||||
jest.mock("../../src/utils/fetch", () => ({
|
||||
fetch: mockFetch,
|
||||
}));
|
||||
|
||||
import {
|
||||
clearCache,
|
||||
fetchVersionData,
|
||||
getAllVersions,
|
||||
getArtifact,
|
||||
getLatestVersion,
|
||||
parseVersionData,
|
||||
} from "../../src/download/versions-client";
|
||||
|
||||
const sampleNdjsonResponse = `{"version":"0.9.26","artifacts":[{"platform":"aarch64-apple-darwin","variant":"default","url":"https://github.com/astral-sh/uv/releases/download/0.9.26/uv-aarch64-apple-darwin.tar.gz","archive_format":"tar.gz","sha256":"fcf0a9ea6599c6ae28a4c854ac6da76f2c889354d7c36ce136ef071f7ab9721f"},{"platform":"x86_64-pc-windows-msvc","variant":"default","url":"https://github.com/astral-sh/uv/releases/download/0.9.26/uv-x86_64-pc-windows-msvc.zip","archive_format":"zip","sha256":"eb02fd95d8e0eed462b4a67ecdd320d865b38c560bffcda9a0b87ec944bdf036"}]}
|
||||
{"version":"0.9.25","artifacts":[{"platform":"aarch64-apple-darwin","variant":"default","url":"https://github.com/astral-sh/uv/releases/download/0.9.25/uv-aarch64-apple-darwin.tar.gz","archive_format":"tar.gz","sha256":"606b3c6949d971709f2526fa0d9f0fd23ccf60e09f117999b406b424af18a6a6"}]}`;
|
||||
|
||||
const multiVariantNdjsonResponse = `{"version":"0.9.26","artifacts":[{"platform":"aarch64-apple-darwin","variant":"python-managed","url":"https://github.com/astral-sh/uv/releases/download/0.9.26/uv-aarch64-apple-darwin-managed.tar.gz","archive_format":"tar.gz","sha256":"managed-checksum"},{"platform":"aarch64-apple-darwin","variant":"default","url":"https://github.com/astral-sh/uv/releases/download/0.9.26/uv-aarch64-apple-darwin.zip","archive_format":"zip","sha256":"default-checksum"}]}`;
|
||||
|
||||
function createMockResponse(
|
||||
ok: boolean,
|
||||
status: number,
|
||||
statusText: string,
|
||||
data: string,
|
||||
) {
|
||||
return {
|
||||
ok,
|
||||
status,
|
||||
statusText,
|
||||
text: async () => data,
|
||||
};
|
||||
}
|
||||
|
||||
describe("versions-client", () => {
|
||||
beforeEach(() => {
|
||||
clearCache();
|
||||
mockFetch.mockReset();
|
||||
});
|
||||
|
||||
describe("fetchVersionData", () => {
|
||||
it("should fetch and parse NDJSON data", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(true, 200, "OK", sampleNdjsonResponse),
|
||||
);
|
||||
|
||||
const versions = await fetchVersionData();
|
||||
|
||||
expect(versions).toHaveLength(2);
|
||||
expect(versions[0].version).toBe("0.9.26");
|
||||
expect(versions[1].version).toBe("0.9.25");
|
||||
});
|
||||
|
||||
it("should throw error on failed fetch", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(false, 500, "Internal Server Error", ""),
|
||||
);
|
||||
|
||||
await expect(fetchVersionData()).rejects.toThrow(
|
||||
"Failed to fetch version data: 500 Internal Server Error",
|
||||
);
|
||||
});
|
||||
|
||||
it("should cache results", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(true, 200, "OK", sampleNdjsonResponse),
|
||||
);
|
||||
|
||||
await fetchVersionData();
|
||||
await fetchVersionData();
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getLatestVersion", () => {
|
||||
it("should return the first version (newest)", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(true, 200, "OK", sampleNdjsonResponse),
|
||||
);
|
||||
|
||||
const latest = await getLatestVersion();
|
||||
|
||||
expect(latest).toBe("0.9.26");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getAllVersions", () => {
|
||||
it("should return all version strings", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(true, 200, "OK", sampleNdjsonResponse),
|
||||
);
|
||||
|
||||
const versions = await getAllVersions();
|
||||
|
||||
expect(versions).toEqual(["0.9.26", "0.9.25"]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getArtifact", () => {
|
||||
beforeEach(() => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(true, 200, "OK", sampleNdjsonResponse),
|
||||
);
|
||||
});
|
||||
|
||||
it("should find artifact by version and platform", async () => {
|
||||
const artifact = await getArtifact("0.9.26", "aarch64", "apple-darwin");
|
||||
|
||||
expect(artifact).toEqual({
|
||||
archiveFormat: "tar.gz",
|
||||
sha256:
|
||||
"fcf0a9ea6599c6ae28a4c854ac6da76f2c889354d7c36ce136ef071f7ab9721f",
|
||||
url: "https://github.com/astral-sh/uv/releases/download/0.9.26/uv-aarch64-apple-darwin.tar.gz",
|
||||
});
|
||||
});
|
||||
|
||||
it("should find windows artifact", async () => {
|
||||
const artifact = await getArtifact("0.9.26", "x86_64", "pc-windows-msvc");
|
||||
|
||||
expect(artifact).toEqual({
|
||||
archiveFormat: "zip",
|
||||
sha256:
|
||||
"eb02fd95d8e0eed462b4a67ecdd320d865b38c560bffcda9a0b87ec944bdf036",
|
||||
url: "https://github.com/astral-sh/uv/releases/download/0.9.26/uv-x86_64-pc-windows-msvc.zip",
|
||||
});
|
||||
});
|
||||
|
||||
it("should prefer the default variant when multiple artifacts share a platform", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(true, 200, "OK", multiVariantNdjsonResponse),
|
||||
);
|
||||
|
||||
const artifact = await getArtifact("0.9.26", "aarch64", "apple-darwin");
|
||||
|
||||
expect(artifact).toEqual({
|
||||
archiveFormat: "zip",
|
||||
sha256: "default-checksum",
|
||||
url: "https://github.com/astral-sh/uv/releases/download/0.9.26/uv-aarch64-apple-darwin.zip",
|
||||
});
|
||||
});
|
||||
|
||||
it("should return undefined for unknown version", async () => {
|
||||
const artifact = await getArtifact("0.0.1", "aarch64", "apple-darwin");
|
||||
|
||||
expect(artifact).toBeUndefined();
|
||||
});
|
||||
|
||||
it("should return undefined for unknown platform", async () => {
|
||||
const artifact = await getArtifact(
|
||||
"0.9.26",
|
||||
"aarch64",
|
||||
"unknown-linux-musl",
|
||||
);
|
||||
|
||||
expect(artifact).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("parseVersionData", () => {
|
||||
it("should throw for malformed NDJSON", () => {
|
||||
expect(() =>
|
||||
parseVersionData('{"version":"0.1.0"', "test-source"),
|
||||
).toThrow("Failed to parse version data from test-source");
|
||||
});
|
||||
});
|
||||
});
|
||||
1
__tests__/fixtures/.tool-versions
Normal file
1
__tests__/fixtures/.tool-versions
Normal file
@@ -0,0 +1 @@
|
||||
uv 0.5.15
|
||||
16
__tests__/fixtures/cache-dir-defined-project/pyproject.toml
Normal file
16
__tests__/fixtures/cache-dir-defined-project/pyproject.toml
Normal file
@@ -0,0 +1,16 @@
|
||||
[project]
|
||||
name = "uv-project"
|
||||
version = "0.1.0"
|
||||
description = "Add your description here"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.12"
|
||||
dependencies = [
|
||||
"ruff>=0.6.2",
|
||||
]
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.uv]
|
||||
cache-dir = "/tmp/pyproject-toml-defined-cache-path"
|
||||
@@ -0,0 +1,2 @@
|
||||
def hello() -> str:
|
||||
return "Hello from uv-project!"
|
||||
38
__tests__/fixtures/cache-dir-defined-project/uv.lock
generated
Normal file
38
__tests__/fixtures/cache-dir-defined-project/uv.lock
generated
Normal file
@@ -0,0 +1,38 @@
|
||||
version = 1
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.6.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/23/f4/279d044f66b79261fd37df76bf72b64471afab5d3b7906a01499c4451910/ruff-0.6.2.tar.gz", hash = "sha256:239ee6beb9e91feb8e0ec384204a763f36cb53fb895a1a364618c6abb076b3be", size = 2460281 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/72/4b/47dd7a69287afb4069fa42c198e899463605460a58120196711bfcf0446b/ruff-0.6.2-py3-none-linux_armv6l.whl", hash = "sha256:5c8cbc6252deb3ea840ad6a20b0f8583caab0c5ef4f9cca21adc5a92b8f79f3c", size = 9695871 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/c3/8aac62ac4638c14a740ee76a755a925f2d0d04580ab790a9887accb729f6/ruff-0.6.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:17002fe241e76544448a8e1e6118abecbe8cd10cf68fde635dad480dba594570", size = 9459354 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/cf/77fbd8d4617b9b9c503f9bffb8552c4e3ea1a58dc36975e7a9104ffb0f85/ruff-0.6.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3dbeac76ed13456f8158b8f4fe087bf87882e645c8e8b606dd17b0b66c2c1158", size = 9163871 },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/1c/765192bab32b79efbb498b06f0b9dcb3629112b53b8777ae1d19b8209e09/ruff-0.6.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:094600ee88cda325988d3f54e3588c46de5c18dae09d683ace278b11f9d4d534", size = 10096250 },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/d0/86f3cb0f6934c99f759c232984a5204d67a26745cad2d9edff6248adf7d2/ruff-0.6.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:316d418fe258c036ba05fbf7dfc1f7d3d4096db63431546163b472285668132b", size = 9475376 },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/cc/4c8d0e225b559a3fae6092ec310d7150d3b02b4669e9223f783ef64d82c0/ruff-0.6.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d72b8b3abf8a2d51b7b9944a41307d2f442558ccb3859bbd87e6ae9be1694a5d", size = 10295634 },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/96/d2699cfb1bb5a01c68122af43454c76c31331e1c8a9bd97d653d7c82524b/ruff-0.6.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2aed7e243be68487aa8982e91c6e260982d00da3f38955873aecd5a9204b1d66", size = 11024941 },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/a9/6ecd66af8929e0f2a1ed308a4137f3521789f28f0eb97d32c2ca3aa7000c/ruff-0.6.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d371f7fc9cec83497fe7cf5eaf5b76e22a8efce463de5f775a1826197feb9df8", size = 10606894 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/73/2ee4cd19f44992fedac1cc6db9e3d825966072f6dcbd4032f21cbd063170/ruff-0.6.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8f310d63af08f583363dfb844ba8f9417b558199c58a5999215082036d795a1", size = 11552886 },
|
||||
{ url = "https://files.pythonhosted.org/packages/60/4c/c0f1cd35ce4a93c54a6bb1ee6934a3a205fa02198dd076678193853ceea1/ruff-0.6.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7db6880c53c56addb8638fe444818183385ec85eeada1d48fc5abe045301b2f1", size = 10264945 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/89/e45c9359b9cdd4245512ea2b9f2bb128a997feaa5f726fc9e8c7a66afadf/ruff-0.6.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1175d39faadd9a50718f478d23bfc1d4da5743f1ab56af81a2b6caf0a2394f23", size = 10100007 },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/74/0bd4e0a7ed5f6908df87892f9bf60a2356c0fd74102d8097298bd9b4f346/ruff-0.6.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939f9c86d51635fe486585389f54582f0d65b8238e08c327c1534844b3bb9a", size = 9559267 },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/03/3dc6dc9419f276f05805bf888c279e3e0b631284abd548d9e87cebb93aec/ruff-0.6.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d0d62ca91219f906caf9b187dea50d17353f15ec9bb15aae4a606cd697b49b4c", size = 9905304 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/5b/d6a72a6a6bbf097c09de468326ef5fa1c9e7aa5e6e45979bc0d984b0dbe7/ruff-0.6.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7438a7288f9d67ed3c8ce4d059e67f7ed65e9fe3aa2ab6f5b4b3610e57e3cb56", size = 10341480 },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/a9/0f2f21fe15ba537c46598f96aa9ae4a3d4b9ec64926664617ca6a8c772f4/ruff-0.6.2-py3-none-win32.whl", hash = "sha256:279d5f7d86696df5f9549b56b9b6a7f6c72961b619022b5b7999b15db392a4da", size = 7961901 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/80/fff12ffe11853d9f4ea3e5221e6dd2e93640a161c05c9579833e09ad40a7/ruff-0.6.2-py3-none-win_amd64.whl", hash = "sha256:d9f3469c7dd43cd22eb1c3fc16926fb8258d50cb1b216658a07be95dd117b0f2", size = 8783320 },
|
||||
{ url = "https://files.pythonhosted.org/packages/56/91/577cdd64cce5e74d3f8b5ecb93f29566def569c741eb008aed4f331ef821/ruff-0.6.2-py3-none-win_arm64.whl", hash = "sha256:f28fcd2cd0e02bdf739297516d5643a945cc7caf09bd9bcb4d932540a5ea4fa9", size = 8225886 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "uv-project"
|
||||
version = "0.1.0"
|
||||
source = { editable = "." }
|
||||
dependencies = [
|
||||
{ name = "ruff" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [{ name = "ruff" }]
|
||||
@@ -9,5 +9,5 @@ dependencies = [
|
||||
]
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
requires = ["uv_build>=0.9.22,<0.10.0"]
|
||||
build-backend = "uv_build"
|
||||
|
||||
@@ -1,33 +1,9 @@
|
||||
version = 1
|
||||
requires-python = ">=3.12"
|
||||
revision = 3
|
||||
requires-python = ">=3.8, <=3.9"
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.6.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/23/f4/279d044f66b79261fd37df76bf72b64471afab5d3b7906a01499c4451910/ruff-0.6.2.tar.gz", hash = "sha256:239ee6beb9e91feb8e0ec384204a763f36cb53fb895a1a364618c6abb076b3be", size = 2460281 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/72/4b/47dd7a69287afb4069fa42c198e899463605460a58120196711bfcf0446b/ruff-0.6.2-py3-none-linux_armv6l.whl", hash = "sha256:5c8cbc6252deb3ea840ad6a20b0f8583caab0c5ef4f9cca21adc5a92b8f79f3c", size = 9695871 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/c3/8aac62ac4638c14a740ee76a755a925f2d0d04580ab790a9887accb729f6/ruff-0.6.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:17002fe241e76544448a8e1e6118abecbe8cd10cf68fde635dad480dba594570", size = 9459354 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/cf/77fbd8d4617b9b9c503f9bffb8552c4e3ea1a58dc36975e7a9104ffb0f85/ruff-0.6.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3dbeac76ed13456f8158b8f4fe087bf87882e645c8e8b606dd17b0b66c2c1158", size = 9163871 },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/1c/765192bab32b79efbb498b06f0b9dcb3629112b53b8777ae1d19b8209e09/ruff-0.6.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:094600ee88cda325988d3f54e3588c46de5c18dae09d683ace278b11f9d4d534", size = 10096250 },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/d0/86f3cb0f6934c99f759c232984a5204d67a26745cad2d9edff6248adf7d2/ruff-0.6.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:316d418fe258c036ba05fbf7dfc1f7d3d4096db63431546163b472285668132b", size = 9475376 },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/cc/4c8d0e225b559a3fae6092ec310d7150d3b02b4669e9223f783ef64d82c0/ruff-0.6.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d72b8b3abf8a2d51b7b9944a41307d2f442558ccb3859bbd87e6ae9be1694a5d", size = 10295634 },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/96/d2699cfb1bb5a01c68122af43454c76c31331e1c8a9bd97d653d7c82524b/ruff-0.6.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2aed7e243be68487aa8982e91c6e260982d00da3f38955873aecd5a9204b1d66", size = 11024941 },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/a9/6ecd66af8929e0f2a1ed308a4137f3521789f28f0eb97d32c2ca3aa7000c/ruff-0.6.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d371f7fc9cec83497fe7cf5eaf5b76e22a8efce463de5f775a1826197feb9df8", size = 10606894 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/73/2ee4cd19f44992fedac1cc6db9e3d825966072f6dcbd4032f21cbd063170/ruff-0.6.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8f310d63af08f583363dfb844ba8f9417b558199c58a5999215082036d795a1", size = 11552886 },
|
||||
{ url = "https://files.pythonhosted.org/packages/60/4c/c0f1cd35ce4a93c54a6bb1ee6934a3a205fa02198dd076678193853ceea1/ruff-0.6.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7db6880c53c56addb8638fe444818183385ec85eeada1d48fc5abe045301b2f1", size = 10264945 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/89/e45c9359b9cdd4245512ea2b9f2bb128a997feaa5f726fc9e8c7a66afadf/ruff-0.6.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1175d39faadd9a50718f478d23bfc1d4da5743f1ab56af81a2b6caf0a2394f23", size = 10100007 },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/74/0bd4e0a7ed5f6908df87892f9bf60a2356c0fd74102d8097298bd9b4f346/ruff-0.6.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939f9c86d51635fe486585389f54582f0d65b8238e08c327c1534844b3bb9a", size = 9559267 },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/03/3dc6dc9419f276f05805bf888c279e3e0b631284abd548d9e87cebb93aec/ruff-0.6.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d0d62ca91219f906caf9b187dea50d17353f15ec9bb15aae4a606cd697b49b4c", size = 9905304 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/5b/d6a72a6a6bbf097c09de468326ef5fa1c9e7aa5e6e45979bc0d984b0dbe7/ruff-0.6.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7438a7288f9d67ed3c8ce4d059e67f7ed65e9fe3aa2ab6f5b4b3610e57e3cb56", size = 10341480 },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/a9/0f2f21fe15ba537c46598f96aa9ae4a3d4b9ec64926664617ca6a8c772f4/ruff-0.6.2-py3-none-win32.whl", hash = "sha256:279d5f7d86696df5f9549b56b9b6a7f6c72961b619022b5b7999b15db392a4da", size = 7961901 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/80/fff12ffe11853d9f4ea3e5221e6dd2e93640a161c05c9579833e09ad40a7/ruff-0.6.2-py3-none-win_amd64.whl", hash = "sha256:d9f3469c7dd43cd22eb1c3fc16926fb8258d50cb1b216658a07be95dd117b0f2", size = 8783320 },
|
||||
{ url = "https://files.pythonhosted.org/packages/56/91/577cdd64cce5e74d3f8b5ecb93f29566def569c741eb008aed4f331ef821/ruff-0.6.2-py3-none-win_arm64.whl", hash = "sha256:f28fcd2cd0e02bdf739297516d5643a945cc7caf09bd9bcb4d932540a5ea4fa9", size = 8225886 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "uv-project"
|
||||
name = "old-python-constraint-project"
|
||||
version = "0.1.0"
|
||||
source = { editable = "." }
|
||||
dependencies = [
|
||||
@@ -35,4 +11,30 @@ dependencies = [
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [{ name = "ruff" }]
|
||||
requires-dist = [{ name = "ruff", specifier = ">=0.6.2" }]
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.14.10"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/57/08/52232a877978dd8f9cf2aeddce3e611b40a63287dfca29b6b8da791f5e8d/ruff-0.14.10.tar.gz", hash = "sha256:9a2e830f075d1a42cd28420d7809ace390832a490ed0966fe373ba288e77aaf4", size = 5859763, upload-time = "2025-12-18T19:28:57.98Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/60/01/933704d69f3f05ee16ef11406b78881733c186fe14b6a46b05cfcaf6d3b2/ruff-0.14.10-py3-none-linux_armv6l.whl", hash = "sha256:7a3ce585f2ade3e1f29ec1b92df13e3da262178df8c8bdf876f48fa0e8316c49", size = 13527080, upload-time = "2025-12-18T19:29:25.642Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/58/a0349197a7dfa603ffb7f5b0470391efa79ddc327c1e29c4851e85b09cc5/ruff-0.14.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:674f9be9372907f7257c51f1d4fc902cb7cf014b9980152b802794317941f08f", size = 13797320, upload-time = "2025-12-18T19:29:02.571Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/82/36be59f00a6082e38c23536df4e71cdbc6af8d7c707eade97fcad5c98235/ruff-0.14.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d85713d522348837ef9df8efca33ccb8bd6fcfc86a2cde3ccb4bc9d28a18003d", size = 12918434, upload-time = "2025-12-18T19:28:51.202Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/00/45c62a7f7e34da92a25804f813ebe05c88aa9e0c25e5cb5a7d23dd7450e3/ruff-0.14.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6987ebe0501ae4f4308d7d24e2d0fe3d7a98430f5adfd0f1fead050a740a3a77", size = 13371961, upload-time = "2025-12-18T19:29:04.991Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/40/31/a5906d60f0405f7e57045a70f2d57084a93ca7425f22e1d66904769d1628/ruff-0.14.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16a01dfb7b9e4eee556fbfd5392806b1b8550c9b4a9f6acd3dbe6812b193c70a", size = 13275629, upload-time = "2025-12-18T19:29:21.381Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/60/61c0087df21894cf9d928dc04bcd4fb10e8b2e8dca7b1a276ba2155b2002/ruff-0.14.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7165d31a925b7a294465fa81be8c12a0e9b60fb02bf177e79067c867e71f8b1f", size = 14029234, upload-time = "2025-12-18T19:29:00.132Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/84/77d911bee3b92348b6e5dab5a0c898d87084ea03ac5dc708f46d88407def/ruff-0.14.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c561695675b972effb0c0a45db233f2c816ff3da8dcfbe7dfc7eed625f218935", size = 15449890, upload-time = "2025-12-18T19:28:53.573Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/36/480206eaefa24a7ec321582dda580443a8f0671fdbf6b1c80e9c3e93a16a/ruff-0.14.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bb98fcbbc61725968893682fd4df8966a34611239c9fd07a1f6a07e7103d08e", size = 15123172, upload-time = "2025-12-18T19:29:23.453Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/38/68e414156015ba80cef5473d57919d27dfb62ec804b96180bafdeaf0e090/ruff-0.14.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f24b47993a9d8cb858429e97bdf8544c78029f09b520af615c1d261bf827001d", size = 14460260, upload-time = "2025-12-18T19:29:27.808Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/19/9e050c0dca8aba824d67cc0db69fb459c28d8cd3f6855b1405b3f29cc91d/ruff-0.14.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59aabd2e2c4fd614d2862e7939c34a532c04f1084476d6833dddef4afab87e9f", size = 14229978, upload-time = "2025-12-18T19:29:11.32Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/eb/e8dd1dd6e05b9e695aa9dd420f4577debdd0f87a5ff2fedda33c09e9be8c/ruff-0.14.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:213db2b2e44be8625002dbea33bb9c60c66ea2c07c084a00d55732689d697a7f", size = 14338036, upload-time = "2025-12-18T19:29:09.184Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/12/f3e3a505db7c19303b70af370d137795fcfec136d670d5de5391e295c134/ruff-0.14.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b914c40ab64865a17a9a5b67911d14df72346a634527240039eb3bd650e5979d", size = 13264051, upload-time = "2025-12-18T19:29:13.431Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/64/8c3a47eaccfef8ac20e0484e68e0772013eb85802f8a9f7603ca751eb166/ruff-0.14.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1484983559f026788e3a5c07c81ef7d1e97c1c78ed03041a18f75df104c45405", size = 13283998, upload-time = "2025-12-18T19:29:06.994Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/84/534a5506f4074e5cc0529e5cd96cfc01bb480e460c7edf5af70d2bcae55e/ruff-0.14.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c70427132db492d25f982fffc8d6c7535cc2fd2c83fc8888f05caaa248521e60", size = 13601891, upload-time = "2025-12-18T19:28:55.811Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/1e/14c916087d8598917dbad9b2921d340f7884824ad6e9c55de948a93b106d/ruff-0.14.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5bcf45b681e9f1ee6445d317ce1fa9d6cba9a6049542d1c3d5b5958986be8830", size = 14336660, upload-time = "2025-12-18T19:29:16.531Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/1c/d7b67ab43f30013b47c12b42d1acd354c195351a3f7a1d67f59e54227ede/ruff-0.14.10-py3-none-win32.whl", hash = "sha256:104c49fc7ab73f3f3a758039adea978869a918f31b73280db175b43a2d9b51d6", size = 13196187, upload-time = "2025-12-18T19:29:19.006Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/9c/896c862e13886fae2af961bef3e6312db9ebc6adc2b156fe95e615dee8c1/ruff-0.14.10-py3-none-win_amd64.whl", hash = "sha256:466297bd73638c6bdf06485683e812db1c00c7ac96d4ddd0294a338c62fdc154", size = 14661283, upload-time = "2025-12-18T19:29:30.16Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/31/b0e29d572670dca3674eeee78e418f20bdf97fa8aa9ea71380885e175ca0/ruff-0.14.10-py3-none-win_arm64.whl", hash = "sha256:e51d046cf6dda98a4633b8a8a771451107413b0f07183b2bef03f075599e44e6", size = 13729839, upload-time = "2025-12-18T19:28:48.636Z" },
|
||||
]
|
||||
|
||||
210
__tests__/utils/inputs.test.ts
Normal file
210
__tests__/utils/inputs.test.ts
Normal file
@@ -0,0 +1,210 @@
|
||||
jest.mock("@actions/core", () => {
|
||||
return {
|
||||
debug: jest.fn(),
|
||||
getBooleanInput: jest.fn(
|
||||
(name: string) => (mockInputs[name] ?? "") === "true",
|
||||
),
|
||||
getInput: jest.fn((name: string) => mockInputs[name] ?? ""),
|
||||
warning: jest.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
import {
|
||||
afterEach,
|
||||
beforeEach,
|
||||
describe,
|
||||
expect,
|
||||
it,
|
||||
jest,
|
||||
} from "@jest/globals";
|
||||
|
||||
// Will be mutated per test before (re-)importing the module under test
|
||||
let mockInputs: Record<string, string> = {};
|
||||
const ORIGINAL_HOME = process.env.HOME;
|
||||
|
||||
describe("cacheDependencyGlob", () => {
|
||||
beforeEach(() => {
|
||||
jest.resetModules();
|
||||
jest.clearAllMocks();
|
||||
mockInputs = {};
|
||||
process.env.HOME = "/home/testuser";
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env.HOME = ORIGINAL_HOME;
|
||||
});
|
||||
|
||||
it("returns empty string when input not provided", async () => {
|
||||
mockInputs["working-directory"] = "/workspace";
|
||||
const { cacheDependencyGlob } = await import("../../src/utils/inputs");
|
||||
expect(cacheDependencyGlob).toBe("");
|
||||
});
|
||||
|
||||
it("resolves a single relative path", async () => {
|
||||
mockInputs["working-directory"] = "/workspace";
|
||||
mockInputs["cache-dependency-glob"] = "requirements.txt";
|
||||
const { cacheDependencyGlob } = await import("../../src/utils/inputs");
|
||||
expect(cacheDependencyGlob).toBe("/workspace/requirements.txt");
|
||||
});
|
||||
|
||||
it("strips leading ./ from relative path", async () => {
|
||||
mockInputs["working-directory"] = "/workspace";
|
||||
mockInputs["cache-dependency-glob"] = "./uv.lock";
|
||||
const { cacheDependencyGlob } = await import("../../src/utils/inputs");
|
||||
expect(cacheDependencyGlob).toBe("/workspace/uv.lock");
|
||||
});
|
||||
|
||||
it("handles multiple lines, trimming whitespace, tilde expansion and absolute paths", async () => {
|
||||
mockInputs["working-directory"] = "/workspace";
|
||||
mockInputs["cache-dependency-glob"] =
|
||||
" ~/.cache/file1\n ./rel/file2 \nfile3.txt";
|
||||
const { cacheDependencyGlob } = await import("../../src/utils/inputs");
|
||||
expect(cacheDependencyGlob).toBe(
|
||||
[
|
||||
"/home/testuser/.cache/file1", // expanded tilde, absolute path unchanged
|
||||
"/workspace/rel/file2", // ./ stripped and resolved
|
||||
"/workspace/file3.txt", // relative path resolved
|
||||
].join("\n"),
|
||||
);
|
||||
});
|
||||
|
||||
it("keeps absolute path unchanged in multiline input", async () => {
|
||||
mockInputs["working-directory"] = "/workspace";
|
||||
mockInputs["cache-dependency-glob"] = "/abs/path.lock\nrelative.lock";
|
||||
const { cacheDependencyGlob } = await import("../../src/utils/inputs");
|
||||
expect(cacheDependencyGlob).toBe(
|
||||
["/abs/path.lock", "/workspace/relative.lock"].join("\n"),
|
||||
);
|
||||
});
|
||||
|
||||
it("handles exclusions in relative paths correct", async () => {
|
||||
mockInputs["working-directory"] = "/workspace";
|
||||
mockInputs["cache-dependency-glob"] = "!/abs/path.lock\n!relative.lock";
|
||||
const { cacheDependencyGlob } = await import("../../src/utils/inputs");
|
||||
expect(cacheDependencyGlob).toBe(
|
||||
["!/abs/path.lock", "!/workspace/relative.lock"].join("\n"),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("tool directories", () => {
|
||||
beforeEach(() => {
|
||||
jest.resetModules();
|
||||
jest.clearAllMocks();
|
||||
mockInputs = {};
|
||||
process.env.HOME = "/home/testuser";
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env.HOME = ORIGINAL_HOME;
|
||||
});
|
||||
|
||||
it("expands tilde for tool-bin-dir and tool-dir", async () => {
|
||||
mockInputs["working-directory"] = "/workspace";
|
||||
mockInputs["tool-bin-dir"] = "~/tool-bin-dir";
|
||||
mockInputs["tool-dir"] = "~/tool-dir";
|
||||
|
||||
const { toolBinDir, toolDir } = await import("../../src/utils/inputs");
|
||||
|
||||
expect(toolBinDir).toBe("/home/testuser/tool-bin-dir");
|
||||
expect(toolDir).toBe("/home/testuser/tool-dir");
|
||||
});
|
||||
});
|
||||
|
||||
describe("cacheLocalPath", () => {
|
||||
beforeEach(() => {
|
||||
jest.resetModules();
|
||||
jest.clearAllMocks();
|
||||
mockInputs = {};
|
||||
process.env.HOME = "/home/testuser";
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env.HOME = ORIGINAL_HOME;
|
||||
});
|
||||
|
||||
it("expands tilde in cache-local-path", async () => {
|
||||
mockInputs["working-directory"] = "/workspace";
|
||||
mockInputs["cache-local-path"] = "~/uv-cache/cache-local-path";
|
||||
|
||||
const { CacheLocalSource, cacheLocalPath } = await import(
|
||||
"../../src/utils/inputs"
|
||||
);
|
||||
|
||||
expect(cacheLocalPath).toEqual({
|
||||
path: "/home/testuser/uv-cache/cache-local-path",
|
||||
source: CacheLocalSource.Input,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("venvPath", () => {
|
||||
beforeEach(() => {
|
||||
jest.resetModules();
|
||||
jest.clearAllMocks();
|
||||
mockInputs = {};
|
||||
process.env.HOME = "/home/testuser";
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env.HOME = ORIGINAL_HOME;
|
||||
});
|
||||
|
||||
it("defaults to .venv in the working directory", async () => {
|
||||
mockInputs["working-directory"] = "/workspace";
|
||||
const { venvPath } = await import("../../src/utils/inputs");
|
||||
expect(venvPath).toBe("/workspace/.venv");
|
||||
});
|
||||
|
||||
it("resolves a relative venv-path", async () => {
|
||||
mockInputs["working-directory"] = "/workspace";
|
||||
mockInputs["activate-environment"] = "true";
|
||||
mockInputs["venv-path"] = "custom-venv";
|
||||
const { venvPath } = await import("../../src/utils/inputs");
|
||||
expect(venvPath).toBe("/workspace/custom-venv");
|
||||
});
|
||||
|
||||
it("normalizes venv-path with trailing slash", async () => {
|
||||
mockInputs["working-directory"] = "/workspace";
|
||||
mockInputs["activate-environment"] = "true";
|
||||
mockInputs["venv-path"] = "custom-venv/";
|
||||
const { venvPath } = await import("../../src/utils/inputs");
|
||||
expect(venvPath).toBe("/workspace/custom-venv");
|
||||
});
|
||||
|
||||
it("keeps an absolute venv-path unchanged", async () => {
|
||||
mockInputs["working-directory"] = "/workspace";
|
||||
mockInputs["activate-environment"] = "true";
|
||||
mockInputs["venv-path"] = "/tmp/custom-venv";
|
||||
const { venvPath } = await import("../../src/utils/inputs");
|
||||
expect(venvPath).toBe("/tmp/custom-venv");
|
||||
});
|
||||
|
||||
it("expands tilde in venv-path", async () => {
|
||||
mockInputs["working-directory"] = "/workspace";
|
||||
mockInputs["activate-environment"] = "true";
|
||||
mockInputs["venv-path"] = "~/.venv";
|
||||
const { venvPath } = await import("../../src/utils/inputs");
|
||||
expect(venvPath).toBe("/home/testuser/.venv");
|
||||
});
|
||||
|
||||
it("warns when venv-path is set but activate-environment is false", async () => {
|
||||
mockInputs["working-directory"] = "/workspace";
|
||||
mockInputs["venv-path"] = "custom-venv";
|
||||
|
||||
const { activateEnvironment, venvPath } = await import(
|
||||
"../../src/utils/inputs"
|
||||
);
|
||||
|
||||
expect(activateEnvironment).toBe(false);
|
||||
expect(venvPath).toBe("/workspace/custom-venv");
|
||||
|
||||
const mockedCore = jest.requireMock("@actions/core") as {
|
||||
warning: jest.Mock;
|
||||
};
|
||||
|
||||
expect(mockedCore.warning).toHaveBeenCalledWith(
|
||||
"venv-path is only used when activate-environment is true",
|
||||
);
|
||||
});
|
||||
});
|
||||
115
__tests__/version/tool-versions-file.test.ts
Normal file
115
__tests__/version/tool-versions-file.test.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
jest.mock("node:fs");
|
||||
jest.mock("@actions/core", () => ({
|
||||
warning: jest.fn(),
|
||||
}));
|
||||
|
||||
import fs from "node:fs";
|
||||
import * as core from "@actions/core";
|
||||
import { beforeEach, describe, expect, it, jest } from "@jest/globals";
|
||||
import { getUvVersionFromToolVersions } from "../../src/version/tool-versions-file";
|
||||
|
||||
const mockedFs = fs as jest.Mocked<typeof fs>;
|
||||
const mockedCore = core as jest.Mocked<typeof core>;
|
||||
|
||||
describe("getUvVersionFromToolVersions", () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it("should return undefined for non-.tool-versions files", () => {
|
||||
const result = getUvVersionFromToolVersions("package.json");
|
||||
expect(result).toBeUndefined();
|
||||
expect(mockedFs.readFileSync).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should return version for valid uv entry", () => {
|
||||
const fileContent = "python 3.11.0\nuv 0.1.0\nnodejs 18.0.0";
|
||||
mockedFs.readFileSync.mockReturnValue(fileContent);
|
||||
|
||||
const result = getUvVersionFromToolVersions(".tool-versions");
|
||||
|
||||
expect(result).toBe("0.1.0");
|
||||
expect(mockedFs.readFileSync).toHaveBeenCalledWith(
|
||||
".tool-versions",
|
||||
"utf8",
|
||||
);
|
||||
});
|
||||
|
||||
it("should return version for uv entry with v prefix", () => {
|
||||
const fileContent = "uv v0.2.0";
|
||||
mockedFs.readFileSync.mockReturnValue(fileContent);
|
||||
|
||||
const result = getUvVersionFromToolVersions(".tool-versions");
|
||||
|
||||
expect(result).toBe("0.2.0");
|
||||
});
|
||||
|
||||
it("should handle whitespace around uv entry", () => {
|
||||
const fileContent = " uv 0.3.0 ";
|
||||
mockedFs.readFileSync.mockReturnValue(fileContent);
|
||||
|
||||
const result = getUvVersionFromToolVersions(".tool-versions");
|
||||
|
||||
expect(result).toBe("0.3.0");
|
||||
});
|
||||
|
||||
it("should skip commented lines", () => {
|
||||
const fileContent = "# uv 0.1.0\npython 3.11.0\nuv 0.2.0";
|
||||
mockedFs.readFileSync.mockReturnValue(fileContent);
|
||||
|
||||
const result = getUvVersionFromToolVersions(".tool-versions");
|
||||
|
||||
expect(result).toBe("0.2.0");
|
||||
});
|
||||
|
||||
it("should return first matching uv version", () => {
|
||||
const fileContent = "uv 0.1.0\npython 3.11.0\nuv 0.2.0";
|
||||
mockedFs.readFileSync.mockReturnValue(fileContent);
|
||||
|
||||
const result = getUvVersionFromToolVersions(".tool-versions");
|
||||
|
||||
expect(result).toBe("0.1.0");
|
||||
});
|
||||
|
||||
it("should return undefined when no uv entry found", () => {
|
||||
const fileContent = "python 3.11.0\nnodejs 18.0.0";
|
||||
mockedFs.readFileSync.mockReturnValue(fileContent);
|
||||
|
||||
const result = getUvVersionFromToolVersions(".tool-versions");
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it("should return undefined for empty file", () => {
|
||||
mockedFs.readFileSync.mockReturnValue("");
|
||||
|
||||
const result = getUvVersionFromToolVersions(".tool-versions");
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it("should warn and return undefined for ref syntax", () => {
|
||||
const fileContent = "uv ref:main";
|
||||
mockedFs.readFileSync.mockReturnValue(fileContent);
|
||||
|
||||
const result = getUvVersionFromToolVersions(".tool-versions");
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
expect(mockedCore.warning).toHaveBeenCalledWith(
|
||||
"The ref syntax of .tool-versions is not supported. Please use a released version instead.",
|
||||
);
|
||||
});
|
||||
|
||||
it("should handle file path with .tool-versions extension", () => {
|
||||
const fileContent = "uv 0.1.0";
|
||||
mockedFs.readFileSync.mockReturnValue(fileContent);
|
||||
|
||||
const result = getUvVersionFromToolVersions("path/to/.tool-versions");
|
||||
|
||||
expect(result).toBe("0.1.0");
|
||||
expect(mockedFs.readFileSync).toHaveBeenCalledWith(
|
||||
"path/to/.tool-versions",
|
||||
"utf8",
|
||||
);
|
||||
});
|
||||
});
|
||||
77
action-types.yml
Normal file
77
action-types.yml
Normal file
@@ -0,0 +1,77 @@
|
||||
# See https://github.com/typesafegithub/github-actions-typing
|
||||
|
||||
inputs:
|
||||
version:
|
||||
type: string
|
||||
version-file:
|
||||
type: string
|
||||
python-version:
|
||||
type: string
|
||||
activate-environment:
|
||||
type: boolean
|
||||
venv-path:
|
||||
type: string
|
||||
working-directory:
|
||||
type: string
|
||||
checksum:
|
||||
type: string
|
||||
github-token:
|
||||
type: string
|
||||
enable-cache:
|
||||
type: enum
|
||||
allowed-values:
|
||||
- "true"
|
||||
- "false"
|
||||
- auto
|
||||
cache-dependency-glob:
|
||||
type: list
|
||||
separator: "\n"
|
||||
list-item:
|
||||
type: string
|
||||
restore-cache:
|
||||
type: boolean
|
||||
save-cache:
|
||||
type: boolean
|
||||
cache-suffix:
|
||||
type: string
|
||||
cache-local-path:
|
||||
type: string
|
||||
prune-cache:
|
||||
type: boolean
|
||||
cache-python:
|
||||
type: boolean
|
||||
ignore-nothing-to-cache:
|
||||
type: boolean
|
||||
ignore-empty-workdir:
|
||||
type: boolean
|
||||
tool-dir:
|
||||
type: string
|
||||
tool-bin-dir:
|
||||
type: string
|
||||
manifest-file:
|
||||
type: string
|
||||
add-problem-matchers:
|
||||
type: boolean
|
||||
resolution-strategy:
|
||||
type: enum
|
||||
allowed-values:
|
||||
- highest
|
||||
- lowest
|
||||
|
||||
outputs:
|
||||
uv-version:
|
||||
type: string
|
||||
uv-path:
|
||||
type: string
|
||||
uvx-path:
|
||||
type: string
|
||||
cache-hit:
|
||||
type: boolean
|
||||
cache-key:
|
||||
type: string
|
||||
venv:
|
||||
type: string
|
||||
python-version:
|
||||
type: string
|
||||
python-cache-hit:
|
||||
type: boolean
|
||||
36
action.yml
36
action.yml
@@ -15,19 +15,18 @@ inputs:
|
||||
activate-environment:
|
||||
description: "Use uv venv to activate a venv ready to be used by later steps. "
|
||||
default: "false"
|
||||
venv-path:
|
||||
description: "Custom path for the virtual environment when using activate-environment. Defaults to '.venv' in the working directory."
|
||||
default: ""
|
||||
working-directory:
|
||||
description: "The directory to execute all commands in and look for files such as pyproject.toml"
|
||||
default: ${{ github.workspace }}
|
||||
checksum:
|
||||
description: "The checksum of the uv version to install"
|
||||
required: false
|
||||
server-url:
|
||||
description: "(Deprecated) The server url to use when downloading uv"
|
||||
required: false
|
||||
default: "https://github.com"
|
||||
github-token:
|
||||
description:
|
||||
"Used to increase the rate limit when retrieving versions and downloading uv."
|
||||
"Used when downloading uv from GitHub releases."
|
||||
required: false
|
||||
default: ${{ github.token }}
|
||||
enable-cache:
|
||||
@@ -35,7 +34,7 @@ inputs:
|
||||
default: "auto"
|
||||
cache-dependency-glob:
|
||||
description:
|
||||
"Glob pattern to match files relative to the repository root to control
|
||||
"Glob pattern to match files relative to the working directory to control
|
||||
the cache."
|
||||
default: |
|
||||
**/*requirements*.txt
|
||||
@@ -44,6 +43,13 @@ inputs:
|
||||
**/*constraints*.in
|
||||
**/pyproject.toml
|
||||
**/uv.lock
|
||||
**/*.py.lock
|
||||
restore-cache:
|
||||
description: "Whether to restore the cache if found."
|
||||
default: "true"
|
||||
save-cache:
|
||||
description: "Whether to save the cache after the run."
|
||||
default: "true"
|
||||
cache-suffix:
|
||||
description: "Suffix for the cache key"
|
||||
required: false
|
||||
@@ -53,6 +59,9 @@ inputs:
|
||||
prune-cache:
|
||||
description: "Prune cache before saving."
|
||||
default: "true"
|
||||
cache-python:
|
||||
description: "Upload managed Python installations to the Github Actions cache."
|
||||
default: "false"
|
||||
ignore-nothing-to-cache:
|
||||
description: "Ignore when nothing is found to cache."
|
||||
default: "false"
|
||||
@@ -66,11 +75,14 @@ inputs:
|
||||
description: "Custom path to set UV_TOOL_BIN_DIR to."
|
||||
required: false
|
||||
manifest-file:
|
||||
description: "URL to the manifest file containing available versions and download URLs."
|
||||
description: "URL to a custom manifest file. Supports the astral-sh/versions NDJSON format and the legacy JSON array format (deprecated)."
|
||||
required: false
|
||||
add-problem-matchers:
|
||||
description: "Add problem matchers."
|
||||
default: "true"
|
||||
resolution-strategy:
|
||||
description: "Resolution strategy to use when resolving version ranges. 'highest' uses the latest compatible version, 'lowest' uses the oldest compatible version."
|
||||
default: "highest"
|
||||
outputs:
|
||||
uv-version:
|
||||
description: "The installed uv version. Useful when using latest."
|
||||
@@ -80,8 +92,16 @@ outputs:
|
||||
description: "The path to the installed uvx binary."
|
||||
cache-hit:
|
||||
description: "A boolean value to indicate a cache entry was found"
|
||||
cache-key:
|
||||
description: "The cache key used for storing/restoring the cache"
|
||||
venv:
|
||||
description: "Path to the activated venv if activate-environment is true"
|
||||
python-version:
|
||||
description: "The Python version that was set."
|
||||
python-cache-hit:
|
||||
description: "A boolean value to indicate the Python cache entry was found"
|
||||
runs:
|
||||
using: "node20"
|
||||
using: "node24"
|
||||
main: "dist/setup/index.js"
|
||||
post: "dist/save-cache/index.js"
|
||||
post-if: success()
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"$schema": "https://biomejs.dev/schemas/2.1.4/schema.json",
|
||||
"$schema": "https://biomejs.dev/schemas/2.3.7/schema.json",
|
||||
"assist": {
|
||||
"actions": {
|
||||
"source": {
|
||||
@@ -18,7 +18,8 @@
|
||||
"!**/node_modules",
|
||||
"!**/package*.json",
|
||||
"!**/known-checksums.*"
|
||||
]
|
||||
],
|
||||
"maxSize": 2097152
|
||||
},
|
||||
"formatter": {
|
||||
"enabled": true,
|
||||
|
||||
46643
dist/save-cache/index.js
generated
vendored
46643
dist/save-cache/index.js
generated
vendored
File diff suppressed because one or more lines are too long
63324
dist/setup/index.js
generated
vendored
63324
dist/setup/index.js
generated
vendored
File diff suppressed because one or more lines are too long
43289
dist/update-known-versions/index.js → dist/update-known-checksums/index.js
generated
vendored
43289
dist/update-known-versions/index.js → dist/update-known-checksums/index.js
generated
vendored
File diff suppressed because one or more lines are too long
82
docs/advanced-version-configuration.md
Normal file
82
docs/advanced-version-configuration.md
Normal file
@@ -0,0 +1,82 @@
|
||||
# Advanced Version Configuration
|
||||
|
||||
This document covers advanced options for configuring which version of uv to install.
|
||||
|
||||
## Install the latest version
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
version: "latest"
|
||||
```
|
||||
|
||||
## Install a specific version
|
||||
|
||||
```yaml
|
||||
- name: Install a specific version of uv
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
version: "0.4.4"
|
||||
```
|
||||
|
||||
## Install a version by supplying a semver range or pep440 specifier
|
||||
|
||||
You can specify a [semver range](https://github.com/npm/node-semver?tab=readme-ov-file#ranges)
|
||||
or [pep440 specifier](https://peps.python.org/pep-0440/#version-specifiers)
|
||||
to install the latest version that satisfies the range.
|
||||
|
||||
```yaml
|
||||
- name: Install a semver range of uv
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
version: ">=0.4.0"
|
||||
```
|
||||
|
||||
```yaml
|
||||
- name: Pinning a minor version of uv
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
version: "0.4.x"
|
||||
```
|
||||
|
||||
```yaml
|
||||
- name: Install a pep440-specifier-satisfying version of uv
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
version: ">=0.4.25,<0.5"
|
||||
```
|
||||
|
||||
## Resolution strategy
|
||||
|
||||
By default, when resolving version ranges, setup-uv will install the highest compatible version.
|
||||
You can change this behavior using the `resolution-strategy` input:
|
||||
|
||||
```yaml
|
||||
- name: Install the lowest compatible version of uv
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
version: ">=0.4.0"
|
||||
resolution-strategy: "lowest"
|
||||
```
|
||||
|
||||
The supported resolution strategies are:
|
||||
- `highest` (default): Install the latest version that satisfies the constraints
|
||||
- `lowest`: Install the oldest version that satisfies the constraints
|
||||
|
||||
This can be useful for testing compatibility with older versions of uv, similar to uv's own `--resolution-strategy` option.
|
||||
|
||||
## Install a version defined in a requirements or config file
|
||||
|
||||
You can use the `version-file` input to specify a file that contains the version of uv to install.
|
||||
This can either be a `pyproject.toml` or `uv.toml` file which defines a `required-version` or
|
||||
uv defined as a dependency in `pyproject.toml` or `requirements.txt`.
|
||||
|
||||
[asdf](https://asdf-vm.com/) `.tool-versions` is also supported, but without the `ref` syntax.
|
||||
|
||||
```yaml
|
||||
- name: Install uv based on the version defined in pyproject.toml
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
version-file: "pyproject.toml"
|
||||
```
|
||||
225
docs/caching.md
Normal file
225
docs/caching.md
Normal file
@@ -0,0 +1,225 @@
|
||||
# Caching
|
||||
|
||||
This document covers all caching-related configuration options for setup-uv.
|
||||
|
||||
## Cache key
|
||||
|
||||
The cache key is automatically generated based on:
|
||||
|
||||
- **Architecture**: CPU architecture (e.g., `x86_64`, `aarch64`)
|
||||
- **Platform**: OS platform type (e.g., `unknown-linux-gnu`, `unknown-linux-musl`, `apple-darwin`,
|
||||
`pc-windows-msvc`)
|
||||
- **OS version**: OS name and version (e.g., `ubuntu-22.04`, `macos-14`, `windows-2022`)
|
||||
- **Python version**: The Python version in use
|
||||
- **Cache options**: Whether pruning and Python caching are enabled
|
||||
- **Dependency hash**: Hash of files matching `cache-dependency-glob`
|
||||
- **Suffix**: Optional `cache-suffix` if provided
|
||||
|
||||
Including the OS version ensures that caches are not shared between different OS versions,
|
||||
preventing binary incompatibility issues when runner images change.
|
||||
|
||||
The computed cache key is available as the `cache-key` output:
|
||||
|
||||
```yaml
|
||||
- name: Setup uv
|
||||
id: setup-uv
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
enable-cache: true
|
||||
- name: Print cache key
|
||||
run: echo "Cache key: ${{ steps.setup-uv.outputs.cache-key }}"
|
||||
```
|
||||
|
||||
## Enable caching
|
||||
|
||||
> [!NOTE]
|
||||
> The cache is pruned before it is uploaded to the GitHub Actions cache. This can lead to
|
||||
> a small or empty cache. See [Disable cache pruning](#disable-cache-pruning) for more details.
|
||||
|
||||
If you enable caching, the [uv cache](https://docs.astral.sh/uv/concepts/cache/) will be uploaded to
|
||||
the GitHub Actions cache. This can speed up runs that reuse the cache by several minutes.
|
||||
Caching is enabled by default on GitHub-hosted runners.
|
||||
|
||||
> [!TIP]
|
||||
>
|
||||
> On self-hosted runners this is usually not needed since the cache generated by uv on the runner's
|
||||
> filesystem is not removed after a run. For more details see [Local cache path](#local-cache-path).
|
||||
|
||||
You can optionally define a custom cache key suffix.
|
||||
|
||||
```yaml
|
||||
- name: Enable caching and define a custom cache key suffix
|
||||
id: setup-uv
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
enable-cache: true
|
||||
cache-suffix: "optional-suffix"
|
||||
```
|
||||
|
||||
When the cache was successfully restored, the output `cache-hit` will be set to `true` and you can
|
||||
use it in subsequent steps. For example, to use the cache in the above case:
|
||||
|
||||
```yaml
|
||||
- name: Do something if the cache was restored
|
||||
if: steps.setup-uv.outputs.cache-hit == 'true'
|
||||
run: echo "Cache was restored"
|
||||
```
|
||||
|
||||
## Cache dependency glob
|
||||
|
||||
If you want to control when the GitHub Actions cache is invalidated, specify a glob pattern with the
|
||||
`cache-dependency-glob` input. The GitHub Actions cache will be invalidated if any file matching the glob pattern
|
||||
changes. If you use relative paths, they are relative to the working directory.
|
||||
|
||||
> [!NOTE]
|
||||
>
|
||||
> You can look up supported patterns [here](https://github.com/actions/toolkit/tree/main/packages/glob#patterns)
|
||||
>
|
||||
> The default is
|
||||
> ```yaml
|
||||
> cache-dependency-glob: |
|
||||
> **/*requirements*.txt
|
||||
> **/*requirements*.in
|
||||
> **/*constraints*.txt
|
||||
> **/*constraints*.in
|
||||
> **/pyproject.toml
|
||||
> **/uv.lock
|
||||
> **/*.py.lock
|
||||
> ```
|
||||
|
||||
```yaml
|
||||
- name: Define a cache dependency glob
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
enable-cache: true
|
||||
cache-dependency-glob: "**/pyproject.toml"
|
||||
```
|
||||
|
||||
```yaml
|
||||
- name: Define a list of cache dependency globs
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
enable-cache: true
|
||||
cache-dependency-glob: |
|
||||
**/requirements*.txt
|
||||
**/pyproject.toml
|
||||
```
|
||||
|
||||
```yaml
|
||||
- name: Define an absolute cache dependency glob
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
enable-cache: true
|
||||
cache-dependency-glob: "/tmp/my-folder/requirements*.txt"
|
||||
```
|
||||
|
||||
```yaml
|
||||
- name: Never invalidate the cache
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
enable-cache: true
|
||||
cache-dependency-glob: ""
|
||||
```
|
||||
|
||||
## Restore cache
|
||||
|
||||
Restoring an existing cache can be enabled or disabled with the `restore-cache` input.
|
||||
By default, the cache will be restored.
|
||||
|
||||
```yaml
|
||||
- name: Don't restore an existing cache
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
enable-cache: true
|
||||
restore-cache: false
|
||||
```
|
||||
|
||||
## Save cache
|
||||
|
||||
You can also disable saving the cache after the run with the `save-cache` input.
|
||||
This can be useful to save cache storage when you know you will not use the cache of the run again.
|
||||
By default, the cache will be saved.
|
||||
|
||||
```yaml
|
||||
- name: Don't save the cache after the run
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
enable-cache: true
|
||||
save-cache: false
|
||||
```
|
||||
|
||||
## Local cache path
|
||||
|
||||
If caching is enabled, this action controls where uv stores its cache on the runner's filesystem
|
||||
by setting `UV_CACHE_DIR`.
|
||||
|
||||
It defaults to `setup-uv-cache` in the `TMP` dir, `D:\a\_temp\setup-uv-cache` on Windows and
|
||||
`/tmp/setup-uv-cache` on Linux/macOS. You can change the default by specifying the path with the
|
||||
`cache-local-path` input.
|
||||
|
||||
> [!NOTE]
|
||||
> If the environment variable `UV_CACHE_DIR` is already set this action will not override it.
|
||||
> If you configured [cache-dir](https://docs.astral.sh/uv/reference/settings/#cache-dir) in your
|
||||
> config file then it is also respected and this action will not set `UV_CACHE_DIR`.
|
||||
|
||||
> [!NOTE]
|
||||
> If caching is disabled, you can still use `cache-local-path` so this action sets `UV_CACHE_DIR`
|
||||
> to your desired path.
|
||||
|
||||
```yaml
|
||||
- name: Define a custom uv cache path
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
cache-local-path: "/path/to/cache"
|
||||
```
|
||||
|
||||
## Disable cache pruning
|
||||
|
||||
By default, the uv cache is pruned after every run, removing pre-built wheels, but retaining any
|
||||
wheels that were built from source. On GitHub-hosted runners, it's typically faster to omit those
|
||||
pre-built wheels from the cache (and instead re-download them from the registry on each run).
|
||||
However, on self-hosted or local runners, preserving the cache may be more efficient. See
|
||||
the [documentation](https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration) for
|
||||
more information.
|
||||
|
||||
If you want to persist the entire cache across runs, disable cache pruning with the `prune-cache`
|
||||
input.
|
||||
|
||||
```yaml
|
||||
- name: Don't prune the cache before saving it
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
enable-cache: true
|
||||
prune-cache: false
|
||||
```
|
||||
|
||||
## Cache Python installs
|
||||
|
||||
By default, the Python install dir (`uv python dir` / `UV_PYTHON_INSTALL_DIR`) is not cached,
|
||||
for the same reason that the dependency cache is pruned.
|
||||
If you want to cache Python installs along with your dependencies, set the `cache-python` input to `true`.
|
||||
|
||||
Note that this only caches Python versions that uv actually installs into `UV_PYTHON_INSTALL_DIR`
|
||||
(i.e. managed Python installs). If uv uses a system Python, there may be nothing to cache.
|
||||
To force managed Python installs, set `UV_PYTHON_PREFERENCE=only-managed`.
|
||||
|
||||
```yaml
|
||||
- name: Cache Python installs
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
enable-cache: true
|
||||
cache-python: true
|
||||
```
|
||||
|
||||
## Ignore nothing to cache
|
||||
|
||||
By default, the action will fail if caching is enabled but there is nothing to upload (the uv cache directory does not exist).
|
||||
If you want to ignore this, set the `ignore-nothing-to-cache` input to `true`.
|
||||
|
||||
```yaml
|
||||
- name: Ignore nothing to cache
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
enable-cache: true
|
||||
ignore-nothing-to-cache: true
|
||||
```
|
||||
81
docs/customization.md
Normal file
81
docs/customization.md
Normal file
@@ -0,0 +1,81 @@
|
||||
# Customization
|
||||
|
||||
This document covers advanced customization options including checksum validation, custom manifests, and problem matchers.
|
||||
|
||||
## Validate checksum
|
||||
|
||||
You can specify a checksum to validate the downloaded executable. Checksums up to the default version
|
||||
are automatically verified by this action. The sha256 hashes can be found on the
|
||||
[releases page](https://github.com/astral-sh/uv/releases) of the uv repo.
|
||||
|
||||
```yaml
|
||||
- name: Install a specific version and validate the checksum
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
version: "0.3.1"
|
||||
checksum: "e11b01402ab645392c7ad6044db63d37e4fd1e745e015306993b07695ea5f9f8"
|
||||
```
|
||||
|
||||
## Manifest file
|
||||
|
||||
By default, setup-uv reads version metadata from
|
||||
[`astral-sh/versions`](https://github.com/astral-sh/versions) (NDJSON format).
|
||||
|
||||
The `manifest-file` input lets you override that source with your own URL, for example to test
|
||||
custom uv builds or alternate download locations.
|
||||
|
||||
### Format
|
||||
|
||||
The manifest file must be in NDJSON format, where each line is a JSON object representing a version and its artifacts. For example:
|
||||
|
||||
```json
|
||||
{"version":"0.10.7","artifacts":[{"platform":"x86_64-unknown-linux-gnu","variant":"default","url":"https://example.com/uv-x86_64-unknown-linux-gnu.tar.gz","archive_format":"tar.gz","sha256":"..."}]}
|
||||
{"version":"0.10.6","artifacts":[{"platform":"x86_64-unknown-linux-gnu","variant":"default","url":"https://example.com/uv-x86_64-unknown-linux-gnu.tar.gz","archive_format":"tar.gz","sha256":"..."}]}
|
||||
```
|
||||
|
||||
setup-uv currently only supports `default` as the `variant`.
|
||||
|
||||
The `archive_format` field is currently ignored.
|
||||
|
||||
### Legacy format: JSON array (deprecated)
|
||||
|
||||
The previous JSON array format is still supported for compatibility, but deprecated and will be
|
||||
removed in a future major release.
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"version": "0.7.13",
|
||||
"artifactName": "uv-aarch64-apple-darwin.tar.gz",
|
||||
"arch": "aarch64",
|
||||
"platform": "apple-darwin",
|
||||
"downloadUrl": "https://github.com/astral-sh/uv/releases/download/0.7.13/uv-aarch64-apple-darwin.tar.gz"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
```yaml
|
||||
- name: Use a custom manifest file
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
manifest-file: "https://example.com/my-custom-manifest.ndjson"
|
||||
```
|
||||
|
||||
> [!NOTE]
|
||||
> When you use a custom manifest file and do not set the `version` input, setup-uv installs the
|
||||
> latest version from that custom manifest.
|
||||
|
||||
## Add problem matchers
|
||||
|
||||
This action automatically adds
|
||||
[problem matchers](https://github.com/actions/toolkit/blob/main/docs/problem-matchers.md)
|
||||
for python errors.
|
||||
|
||||
You can disable this by setting the `add-problem-matchers` input to `false`.
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv without problem matchers
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
add-problem-matchers: false
|
||||
```
|
||||
160
docs/environment-and-tools.md
Normal file
160
docs/environment-and-tools.md
Normal file
@@ -0,0 +1,160 @@
|
||||
# Environment and Tools
|
||||
|
||||
This document covers environment activation, tool directory configuration, and authentication options.
|
||||
|
||||
## Activate environment
|
||||
|
||||
You can set `activate-environment` to `true` to automatically activate a venv.
|
||||
This allows directly using it in later steps:
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv and activate the environment
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
activate-environment: true
|
||||
- run: uv pip install pip
|
||||
```
|
||||
|
||||
By default, the venv is created at `.venv` inside the `working-directory`.
|
||||
|
||||
You can customize the venv location with `venv-path`, for example to place it in the runner temp directory:
|
||||
|
||||
```yaml
|
||||
- uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
activate-environment: true
|
||||
venv-path: ${{ runner.temp }}/custom-venv
|
||||
```
|
||||
|
||||
> [!WARNING]
|
||||
>
|
||||
> Activating the environment adds your dependencies to the `PATH`, which could break some workflows.
|
||||
> For example, if you have a dependency which requires uv, e.g., `hatch`, activating the
|
||||
> environment will shadow the `uv` binary installed by this action and may result in a different uv
|
||||
> version being used.
|
||||
>
|
||||
> We do not recommend using this setting for most use-cases. Instead, use `uv run` to execute
|
||||
> commands in the environment.
|
||||
|
||||
## GitHub authentication token
|
||||
|
||||
By default, this action resolves available uv versions from
|
||||
[`astral-sh/versions`](https://github.com/astral-sh/versions), then downloads uv artifacts from
|
||||
GitHub Releases.
|
||||
|
||||
You can provide a token via `github-token` to authenticate those downloads. By default, the
|
||||
`GITHUB_TOKEN` secret is used, which is automatically provided by GitHub Actions.
|
||||
|
||||
If the default
|
||||
[permissions for the GitHub token](https://docs.github.com/en/actions/security-for-github-actions/security-guides/automatic-token-authentication#permissions-for-the-github_token)
|
||||
are not sufficient, you can provide a custom GitHub token with the necessary permissions.
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv with a custom GitHub token
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
github-token: ${{ secrets.CUSTOM_GITHUB_TOKEN }}
|
||||
```
|
||||
|
||||
## UV_TOOL_DIR
|
||||
|
||||
On Windows `UV_TOOL_DIR` is set to `uv-tool-dir` in the `TMP` dir (e.g. `D:\a\_temp\uv-tool-dir`).
|
||||
On GitHub hosted runners this is on the much faster `D:` drive.
|
||||
|
||||
On all other platforms the tool environments are placed in the
|
||||
[default location](https://docs.astral.sh/uv/concepts/tools/#tools-directory).
|
||||
|
||||
If you want to change this behaviour (especially on self-hosted runners) you can use the `tool-dir`
|
||||
input:
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv with a custom tool dir
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
tool-dir: "/path/to/tool/dir"
|
||||
```
|
||||
|
||||
## UV_TOOL_BIN_DIR
|
||||
|
||||
On Windows `UV_TOOL_BIN_DIR` is set to `uv-tool-bin-dir` in the `TMP` dir (e.g.
|
||||
`D:\a\_temp\uv-tool-bin-dir`). On GitHub hosted runners this is on the much faster `D:` drive. This
|
||||
path is also automatically added to the PATH.
|
||||
|
||||
On all other platforms the tool binaries get installed to the
|
||||
[default location](https://docs.astral.sh/uv/concepts/tools/#the-bin-directory).
|
||||
|
||||
If you want to change this behaviour (especially on self-hosted runners) you can use the
|
||||
`tool-bin-dir` input:
|
||||
|
||||
```yaml
|
||||
- name: Install the latest version of uv with a custom tool bin dir
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
tool-bin-dir: "/path/to/tool-bin/dir"
|
||||
```
|
||||
|
||||
## Tilde Expansion
|
||||
|
||||
This action supports expanding the `~` character to the user's home directory for the following inputs:
|
||||
|
||||
- `version-file`
|
||||
- `cache-local-path`
|
||||
- `tool-dir`
|
||||
- `tool-bin-dir`
|
||||
- `cache-dependency-glob`
|
||||
|
||||
```yaml
|
||||
- name: Expand the tilde character
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
cache-local-path: "~/path/to/cache"
|
||||
tool-dir: "~/path/to/tool/dir"
|
||||
tool-bin-dir: "~/path/to/tool-bin/dir"
|
||||
cache-dependency-glob: "~/my-cache-buster"
|
||||
```
|
||||
|
||||
## Ignore empty workdir
|
||||
|
||||
By default, the action will warn if the workdir is empty, because this is usually the case when
|
||||
`actions/checkout` is configured to run after `setup-uv`, which is not supported.
|
||||
|
||||
If you want to ignore this, set the `ignore-empty-workdir` input to `true`.
|
||||
|
||||
```yaml
|
||||
- name: Ignore empty workdir
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
ignore-empty-workdir: true
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
This action sets several environment variables that influence uv's behavior and can be used by subsequent steps:
|
||||
|
||||
- `UV_PYTHON`: Set when `python-version` input is specified. Controls which Python version uv uses.
|
||||
- `UV_CACHE_DIR`: Set when caching is enabled (unless already configured in uv config files). Controls where uv stores its cache.
|
||||
- `UV_TOOL_DIR`: Set when `tool-dir` input is specified. Controls where uv installs tool environments.
|
||||
- `UV_TOOL_BIN_DIR`: Set when `tool-bin-dir` input is specified. Controls where uv installs tool binaries.
|
||||
- `UV_PYTHON_INSTALL_DIR`: Always set. Controls where uv installs Python versions.
|
||||
- `VIRTUAL_ENV`: Set when `activate-environment` is true. Points to the activated virtual environment.
|
||||
|
||||
**Environment variables that affect the action behavior:**
|
||||
|
||||
- `UV_NO_MODIFY_PATH`: If set, prevents the action from modifying PATH. Cannot be used with `activate-environment`.
|
||||
- `UV_CACHE_DIR`: If already set, the action will respect it instead of setting its own cache directory.
|
||||
|
||||
```yaml
|
||||
- name: Example using environment variables
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
python-version: "3.12"
|
||||
tool-dir: "/custom/tool/dir"
|
||||
enable-cache: true
|
||||
|
||||
- name: Check environment variables
|
||||
run: |
|
||||
echo "UV_PYTHON: $UV_PYTHON"
|
||||
echo "UV_CACHE_DIR: $UV_CACHE_DIR"
|
||||
echo "UV_TOOL_DIR: $UV_TOOL_DIR"
|
||||
echo "UV_PYTHON_INSTALL_DIR: $UV_PYTHON_INSTALL_DIR"
|
||||
```
|
||||
2211
package-lock.json
generated
2211
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
36
package.json
36
package.json
@@ -7,42 +7,44 @@
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"check": "biome check --write",
|
||||
"package": "ncc build -o dist/setup src/setup-uv.ts && ncc build -o dist/save-cache src/save-cache.ts && ncc build -o dist/update-known-versions src/update-known-versions.ts",
|
||||
"package": "ncc build -o dist/setup src/setup-uv.ts && ncc build -o dist/save-cache src/save-cache.ts && ncc build -o dist/update-known-checksums src/update-known-checksums.ts",
|
||||
"test": "jest",
|
||||
"act": "act pull_request -W .github/workflows/test.yml --container-architecture linux/amd64 -s GITHUB_TOKEN=\"$(gh auth token)\"",
|
||||
"update-known-versions": "RUNNER_TEMP=known_versions node dist/update-known-versions/index.js src/download/checksum/known-versions.ts \"$(gh auth token)\"",
|
||||
"update-known-checksums": "RUNNER_TEMP=known_versions node dist/update-known-checksums/index.js src/download/checksum/known-checksums.ts",
|
||||
"all": "npm run build && npm run check && npm run package && npm test"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/astral-sh/setup-uv.git"
|
||||
},
|
||||
"keywords": ["actions", "python", "setup", "uv"],
|
||||
"keywords": [
|
||||
"actions",
|
||||
"python",
|
||||
"setup",
|
||||
"uv"
|
||||
],
|
||||
"author": "@eifinger",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/cache": "^4.0.3",
|
||||
"@actions/cache": "^4.1.0",
|
||||
"@actions/core": "^1.11.1",
|
||||
"@actions/exec": "^1.1.1",
|
||||
"@actions/glob": "^0.5.0",
|
||||
"@actions/io": "^1.1.3",
|
||||
"@actions/tool-cache": "^2.0.2",
|
||||
"@octokit/core": "^7.0.3",
|
||||
"@octokit/plugin-paginate-rest": "^13.1.1",
|
||||
"@octokit/plugin-rest-endpoint-methods": "^16.0.0",
|
||||
"@renovatebot/pep440": "^4.1.0",
|
||||
"smol-toml": "^1.3.4",
|
||||
"undici": "^7.10.0"
|
||||
"@renovatebot/pep440": "^4.2.1",
|
||||
"smol-toml": "^1.6.0",
|
||||
"undici": "5.28.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "2.1.4",
|
||||
"@biomejs/biome": "2.3.8",
|
||||
"@types/js-yaml": "^4.0.9",
|
||||
"@types/node": "^24.0.14",
|
||||
"@types/semver": "^7.7.0",
|
||||
"@vercel/ncc": "^0.38.3",
|
||||
"jest": "^30.0.5",
|
||||
"@types/node": "^24.10.1",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@vercel/ncc": "^0.38.4",
|
||||
"jest": "^30.2.0",
|
||||
"js-yaml": "^4.1.0",
|
||||
"ts-jest": "^29.4.1",
|
||||
"typescript": "^5.8.3"
|
||||
"ts-jest": "^29.4.5",
|
||||
"typescript": "^5.9.3"
|
||||
}
|
||||
}
|
||||
|
||||
128
src/cache/restore-cache.ts
vendored
128
src/cache/restore-cache.ts
vendored
@@ -1,43 +1,82 @@
|
||||
import * as cache from "@actions/cache";
|
||||
import * as core from "@actions/core";
|
||||
import * as exec from "@actions/exec";
|
||||
import { hashFiles } from "../hash/hash-files";
|
||||
import {
|
||||
cacheDependencyGlob,
|
||||
cacheLocalPath,
|
||||
cachePython,
|
||||
cacheSuffix,
|
||||
pruneCache,
|
||||
pythonVersion as pythonVersionInput,
|
||||
workingDirectory,
|
||||
pythonDir,
|
||||
restoreCache as shouldRestoreCache,
|
||||
} from "../utils/inputs";
|
||||
import { getArch, getPlatform } from "../utils/platforms";
|
||||
import { getArch, getOSNameVersion, getPlatform } from "../utils/platforms";
|
||||
|
||||
export const STATE_CACHE_KEY = "cache-key";
|
||||
export const STATE_CACHE_MATCHED_KEY = "cache-matched-key";
|
||||
const CACHE_VERSION = "1";
|
||||
export const STATE_PYTHON_CACHE_MATCHED_KEY = "python-cache-matched-key";
|
||||
|
||||
export async function restoreCache(): Promise<void> {
|
||||
const cacheKey = await computeKeys();
|
||||
const CACHE_VERSION = "2";
|
||||
|
||||
let matchedKey: string | undefined;
|
||||
core.info(
|
||||
`Trying to restore uv cache from GitHub Actions cache with key: ${cacheKey}`,
|
||||
);
|
||||
try {
|
||||
matchedKey = await cache.restoreCache([cacheLocalPath], cacheKey);
|
||||
} catch (err) {
|
||||
const message = (err as Error).message;
|
||||
core.warning(message);
|
||||
core.setOutput("cache-hit", false);
|
||||
export async function restoreCache(pythonVersion?: string): Promise<void> {
|
||||
const cacheKey = await computeKeys(pythonVersion);
|
||||
core.saveState(STATE_CACHE_KEY, cacheKey);
|
||||
core.setOutput("cache-key", cacheKey);
|
||||
|
||||
if (!shouldRestoreCache) {
|
||||
core.info("restore-cache is false. Skipping restore cache step.");
|
||||
core.setOutput("python-cache-hit", false);
|
||||
return;
|
||||
}
|
||||
|
||||
core.saveState(STATE_CACHE_KEY, cacheKey);
|
||||
if (cacheLocalPath === undefined) {
|
||||
throw new Error(
|
||||
"cache-local-path is not set. Cannot restore cache without a valid cache path.",
|
||||
);
|
||||
}
|
||||
|
||||
handleMatchResult(matchedKey, cacheKey);
|
||||
await restoreCacheFromKey(
|
||||
cacheKey,
|
||||
cacheLocalPath.path,
|
||||
STATE_CACHE_MATCHED_KEY,
|
||||
"cache-hit",
|
||||
);
|
||||
|
||||
if (cachePython) {
|
||||
await restoreCacheFromKey(
|
||||
`${cacheKey}-python`,
|
||||
pythonDir,
|
||||
STATE_PYTHON_CACHE_MATCHED_KEY,
|
||||
"python-cache-hit",
|
||||
);
|
||||
} else {
|
||||
core.setOutput("python-cache-hit", false);
|
||||
}
|
||||
}
|
||||
|
||||
async function computeKeys(): Promise<string> {
|
||||
async function restoreCacheFromKey(
|
||||
cacheKey: string,
|
||||
cachePath: string,
|
||||
stateKey: string,
|
||||
outputKey: string,
|
||||
): Promise<void> {
|
||||
core.info(
|
||||
`Trying to restore cache from GitHub Actions cache with key: ${cacheKey}`,
|
||||
);
|
||||
let matchedKey: string | undefined;
|
||||
try {
|
||||
matchedKey = await cache.restoreCache([cachePath], cacheKey);
|
||||
} catch (err) {
|
||||
const message = (err as Error).message;
|
||||
core.warning(message);
|
||||
core.setOutput(outputKey, false);
|
||||
return;
|
||||
}
|
||||
|
||||
handleMatchResult(matchedKey, cacheKey, stateKey, outputKey);
|
||||
}
|
||||
|
||||
async function computeKeys(pythonVersion?: string): Promise<string> {
|
||||
let cacheDependencyPathHash = "-";
|
||||
if (cacheDependencyGlob !== "") {
|
||||
core.info(
|
||||
@@ -54,56 +93,27 @@ async function computeKeys(): Promise<string> {
|
||||
cacheDependencyPathHash = "-no-dependency-glob";
|
||||
}
|
||||
const suffix = cacheSuffix ? `-${cacheSuffix}` : "";
|
||||
const pythonVersion = await getPythonVersion();
|
||||
const version = pythonVersion ?? "unknown";
|
||||
const platform = await getPlatform();
|
||||
const osNameVersion = getOSNameVersion();
|
||||
const pruned = pruneCache ? "-pruned" : "";
|
||||
return `setup-uv-${CACHE_VERSION}-${getArch()}-${platform}-${pythonVersion}${pruned}${cacheDependencyPathHash}${suffix}`;
|
||||
}
|
||||
|
||||
async function getPythonVersion(): Promise<string> {
|
||||
if (pythonVersionInput !== "") {
|
||||
return pythonVersionInput;
|
||||
}
|
||||
|
||||
let output = "";
|
||||
const options: exec.ExecOptions = {
|
||||
listeners: {
|
||||
stdout: (data: Buffer) => {
|
||||
output += data.toString();
|
||||
},
|
||||
},
|
||||
silent: !core.isDebug(),
|
||||
};
|
||||
|
||||
try {
|
||||
const execArgs = ["python", "find", "--directory", workingDirectory];
|
||||
await exec.exec("uv", execArgs, options);
|
||||
const pythonPath = output.trim();
|
||||
|
||||
output = "";
|
||||
await exec.exec(pythonPath, ["--version"], options);
|
||||
// output is like "Python 3.8.10"
|
||||
return output.split(" ")[1].trim();
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
core.debug(`Failed to get python version from uv. Error: ${err.message}`);
|
||||
return "unknown";
|
||||
}
|
||||
const python = cachePython ? "-py" : "";
|
||||
return `setup-uv-${CACHE_VERSION}-${getArch()}-${platform}-${osNameVersion}-${version}${pruned}${python}${cacheDependencyPathHash}${suffix}`;
|
||||
}
|
||||
|
||||
function handleMatchResult(
|
||||
matchedKey: string | undefined,
|
||||
primaryKey: string,
|
||||
stateKey: string,
|
||||
outputKey: string,
|
||||
): void {
|
||||
if (!matchedKey) {
|
||||
core.info(`No GitHub Actions cache found for key: ${primaryKey}`);
|
||||
core.setOutput("cache-hit", false);
|
||||
core.setOutput(outputKey, false);
|
||||
return;
|
||||
}
|
||||
|
||||
core.saveState(STATE_CACHE_MATCHED_KEY, matchedKey);
|
||||
core.info(
|
||||
`uv cache restored from GitHub Actions cache with key: ${matchedKey}`,
|
||||
);
|
||||
core.setOutput("cache-hit", true);
|
||||
core.saveState(stateKey, matchedKey);
|
||||
core.info(`cache restored from GitHub Actions cache with key: ${matchedKey}`);
|
||||
core.setOutput(outputKey, true);
|
||||
}
|
||||
|
||||
@@ -6,33 +6,35 @@ import type { Architecture, Platform } from "../../utils/platforms";
|
||||
import { KNOWN_CHECKSUMS } from "./known-checksums";
|
||||
|
||||
export async function validateChecksum(
|
||||
checkSum: string | undefined,
|
||||
checksum: string | undefined,
|
||||
downloadPath: string,
|
||||
arch: Architecture,
|
||||
platform: Platform,
|
||||
version: string,
|
||||
): Promise<void> {
|
||||
let isValid: boolean | undefined;
|
||||
if (checkSum !== undefined && checkSum !== "") {
|
||||
isValid = await validateFileCheckSum(downloadPath, checkSum);
|
||||
} else {
|
||||
core.debug("Checksum not provided. Checking known checksums.");
|
||||
const key = `${arch}-${platform}-${version}`;
|
||||
if (key in KNOWN_CHECKSUMS) {
|
||||
const knownChecksum = KNOWN_CHECKSUMS[`${arch}-${platform}-${version}`];
|
||||
core.debug(`Checking checksum for ${arch}-${platform}-${version}.`);
|
||||
isValid = await validateFileCheckSum(downloadPath, knownChecksum);
|
||||
} else {
|
||||
core.debug(`No known checksum found for ${key}.`);
|
||||
}
|
||||
const key = `${arch}-${platform}-${version}`;
|
||||
const hasProvidedChecksum = checksum !== undefined && checksum !== "";
|
||||
const checksumToUse = hasProvidedChecksum ? checksum : KNOWN_CHECKSUMS[key];
|
||||
|
||||
if (checksumToUse === undefined) {
|
||||
core.debug(`No checksum found for ${key}.`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (isValid === false) {
|
||||
throw new Error(`Checksum for ${downloadPath} did not match ${checkSum}.`);
|
||||
}
|
||||
if (isValid === true) {
|
||||
core.debug(`Checksum for ${downloadPath} is valid.`);
|
||||
const checksumSource = hasProvidedChecksum
|
||||
? "provided checksum"
|
||||
: `KNOWN_CHECKSUMS entry for ${key}`;
|
||||
|
||||
core.debug(`Validating checksum using ${checksumSource}.`);
|
||||
const isValid = await validateFileCheckSum(downloadPath, checksumToUse);
|
||||
|
||||
if (!isValid) {
|
||||
throw new Error(
|
||||
`Checksum for ${downloadPath} did not match ${checksumToUse}.`,
|
||||
);
|
||||
}
|
||||
|
||||
core.debug(`Checksum for ${downloadPath} is valid.`);
|
||||
}
|
||||
|
||||
async function validateFileCheckSum(
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,59 +1,34 @@
|
||||
import { promises as fs } from "node:fs";
|
||||
import * as tc from "@actions/tool-cache";
|
||||
import { KNOWN_CHECKSUMS } from "./known-checksums";
|
||||
|
||||
export interface ChecksumEntry {
|
||||
key: string;
|
||||
checksum: string;
|
||||
}
|
||||
|
||||
export async function updateChecksums(
|
||||
filePath: string,
|
||||
downloadUrls: string[],
|
||||
checksumEntries: ChecksumEntry[],
|
||||
): Promise<void> {
|
||||
await fs.rm(filePath);
|
||||
await fs.appendFile(
|
||||
filePath,
|
||||
"// AUTOGENERATED_DO_NOT_EDIT\nexport const KNOWN_CHECKSUMS: { [key: string]: string } = {\n",
|
||||
);
|
||||
let firstLine = true;
|
||||
for (const downloadUrl of downloadUrls) {
|
||||
const key = getKey(downloadUrl);
|
||||
if (key === undefined) {
|
||||
const deduplicatedEntries = new Map<string, string>();
|
||||
|
||||
for (const entry of checksumEntries) {
|
||||
if (deduplicatedEntries.has(entry.key)) {
|
||||
continue;
|
||||
}
|
||||
const checksum = await getOrDownloadChecksum(key, downloadUrl);
|
||||
if (!firstLine) {
|
||||
await fs.appendFile(filePath, ",\n");
|
||||
}
|
||||
await fs.appendFile(filePath, ` "${key}":\n "${checksum}"`);
|
||||
firstLine = false;
|
||||
}
|
||||
await fs.appendFile(filePath, ",\n};\n");
|
||||
}
|
||||
|
||||
function getKey(downloadUrl: string): string | undefined {
|
||||
// https://github.com/astral-sh/uv/releases/download/0.3.2/uv-aarch64-apple-darwin.tar.gz.sha256
|
||||
const parts = downloadUrl.split("/");
|
||||
const fileName = parts[parts.length - 1];
|
||||
if (fileName.startsWith("source")) {
|
||||
return undefined;
|
||||
deduplicatedEntries.set(entry.key, entry.checksum);
|
||||
}
|
||||
const name = fileName.split(".")[0].split("uv-")[1];
|
||||
const version = parts[parts.length - 2];
|
||||
return `${name}-${version}`;
|
||||
}
|
||||
|
||||
async function getOrDownloadChecksum(
|
||||
key: string,
|
||||
downloadUrl: string,
|
||||
): Promise<string> {
|
||||
let checksum = "";
|
||||
if (key in KNOWN_CHECKSUMS) {
|
||||
checksum = KNOWN_CHECKSUMS[key];
|
||||
} else {
|
||||
const content = await downloadAssetContent(downloadUrl);
|
||||
checksum = content.split(" ")[0].trim();
|
||||
}
|
||||
return checksum;
|
||||
}
|
||||
const body = [...deduplicatedEntries.entries()]
|
||||
.map(([key, checksum]) => ` "${key}":\n "${checksum}"`)
|
||||
.join(",\n");
|
||||
|
||||
async function downloadAssetContent(downloadUrl: string): Promise<string> {
|
||||
const downloadPath = await tc.downloadTool(downloadUrl);
|
||||
const content = await fs.readFile(downloadPath, "utf8");
|
||||
return content;
|
||||
const content =
|
||||
"// AUTOGENERATED_DO_NOT_EDIT\n" +
|
||||
"export const KNOWN_CHECKSUMS: { [key: string]: string } = {\n" +
|
||||
body +
|
||||
(body === "" ? "" : ",\n") +
|
||||
"};\n";
|
||||
|
||||
await fs.writeFile(filePath, content);
|
||||
}
|
||||
|
||||
@@ -3,14 +3,20 @@ import * as path from "node:path";
|
||||
import * as core from "@actions/core";
|
||||
import * as tc from "@actions/tool-cache";
|
||||
import * as pep440 from "@renovatebot/pep440";
|
||||
import { OWNER, REPO, TOOL_CACHE_NAME } from "../utils/constants";
|
||||
import { Octokit } from "../utils/octokit";
|
||||
import * as semver from "semver";
|
||||
import { TOOL_CACHE_NAME, VERSIONS_NDJSON_URL } from "../utils/constants";
|
||||
import type { Architecture, Platform } from "../utils/platforms";
|
||||
import { validateChecksum } from "./checksum/checksum";
|
||||
import {
|
||||
getDownloadUrl,
|
||||
getAllVersions as getAllManifestVersions,
|
||||
getLatestKnownVersion as getLatestVersionInManifest,
|
||||
getManifestArtifact,
|
||||
} from "./version-manifest";
|
||||
import {
|
||||
getAllVersions as getAllVersionsFromNdjson,
|
||||
getArtifact as getArtifactFromNdjson,
|
||||
getLatestVersion as getLatestVersionFromNdjson,
|
||||
} from "./versions-client";
|
||||
|
||||
export function tryGetFromToolCache(
|
||||
arch: Architecture,
|
||||
@@ -27,20 +33,26 @@ export function tryGetFromToolCache(
|
||||
return { installedPath, version: resolvedVersion };
|
||||
}
|
||||
|
||||
export async function downloadVersionFromGithub(
|
||||
serverUrl: string,
|
||||
export async function downloadVersionFromNdjson(
|
||||
platform: Platform,
|
||||
arch: Architecture,
|
||||
version: string,
|
||||
checkSum: string | undefined,
|
||||
githubToken: string,
|
||||
): Promise<{ version: string; cachedToolDir: string }> {
|
||||
const artifact = `uv-${arch}-${platform}`;
|
||||
const extension = getExtension(platform);
|
||||
const downloadUrl = `${serverUrl}/${OWNER}/${REPO}/releases/download/${version}/${artifact}${extension}`;
|
||||
const artifact = await getArtifactFromNdjson(version, arch, platform);
|
||||
|
||||
if (!artifact) {
|
||||
throw new Error(
|
||||
`Could not find artifact for version ${version}, arch ${arch}, platform ${platform} in ${VERSIONS_NDJSON_URL} .`,
|
||||
);
|
||||
}
|
||||
|
||||
// For the default astral-sh/versions source, checksum validation relies on
|
||||
// user input or the built-in KNOWN_CHECKSUMS table, not NDJSON sha256 values.
|
||||
return await downloadVersion(
|
||||
downloadUrl,
|
||||
artifact,
|
||||
artifact.url,
|
||||
`uv-${arch}-${platform}`,
|
||||
platform,
|
||||
arch,
|
||||
version,
|
||||
@@ -50,39 +62,32 @@ export async function downloadVersionFromGithub(
|
||||
}
|
||||
|
||||
export async function downloadVersionFromManifest(
|
||||
manifestUrl: string | undefined,
|
||||
manifestUrl: string,
|
||||
platform: Platform,
|
||||
arch: Architecture,
|
||||
version: string,
|
||||
checkSum: string | undefined,
|
||||
githubToken: string,
|
||||
): Promise<{ version: string; cachedToolDir: string }> {
|
||||
const downloadUrl = await getDownloadUrl(
|
||||
const artifact = await getManifestArtifact(
|
||||
manifestUrl,
|
||||
version,
|
||||
arch,
|
||||
platform,
|
||||
);
|
||||
if (!downloadUrl) {
|
||||
core.info(
|
||||
`manifest-file does not contain version ${version}, arch ${arch}, platform ${platform}. Falling back to GitHub releases.`,
|
||||
);
|
||||
return await downloadVersionFromGithub(
|
||||
"https://github.com",
|
||||
platform,
|
||||
arch,
|
||||
version,
|
||||
checkSum,
|
||||
githubToken,
|
||||
if (!artifact) {
|
||||
throw new Error(
|
||||
`manifest-file does not contain version ${version}, arch ${arch}, platform ${platform}.`,
|
||||
);
|
||||
}
|
||||
|
||||
return await downloadVersion(
|
||||
downloadUrl,
|
||||
artifact.downloadUrl,
|
||||
`uv-${arch}-${platform}`,
|
||||
platform,
|
||||
arch,
|
||||
version,
|
||||
checkSum,
|
||||
resolveChecksum(checkSum, artifact.checksum),
|
||||
githubToken,
|
||||
);
|
||||
}
|
||||
@@ -93,7 +98,7 @@ async function downloadVersion(
|
||||
platform: Platform,
|
||||
arch: Architecture,
|
||||
version: string,
|
||||
checkSum: string | undefined,
|
||||
checksum: string | undefined,
|
||||
githubToken: string,
|
||||
): Promise<{ version: string; cachedToolDir: string }> {
|
||||
core.info(`Downloading uv from "${downloadUrl}" ...`);
|
||||
@@ -102,19 +107,29 @@ async function downloadVersion(
|
||||
undefined,
|
||||
githubToken,
|
||||
);
|
||||
await validateChecksum(checkSum, downloadPath, arch, platform, version);
|
||||
await validateChecksum(checksum, downloadPath, arch, platform, version);
|
||||
|
||||
let uvDir: string;
|
||||
const extension = getExtension(platform);
|
||||
if (platform === "pc-windows-msvc") {
|
||||
const fullPathWithExtension = `${downloadPath}${extension}`;
|
||||
await fs.copyFile(downloadPath, fullPathWithExtension);
|
||||
uvDir = await tc.extractZip(fullPathWithExtension);
|
||||
// On windows extracting the zip does not create an intermediate directory
|
||||
// On windows extracting the zip does not create an intermediate directory.
|
||||
try {
|
||||
// Try tar first as it's much faster, but only bsdtar supports zip files,
|
||||
// so this may fail if another tar, like gnu tar, ends up being used.
|
||||
uvDir = await tc.extractTar(downloadPath, undefined, "x");
|
||||
} catch (err) {
|
||||
core.info(
|
||||
`Extracting with tar failed, falling back to zip extraction: ${(err as Error).message}`,
|
||||
);
|
||||
const extension = getExtension(platform);
|
||||
const fullPathWithExtension = `${downloadPath}${extension}`;
|
||||
await fs.copyFile(downloadPath, fullPathWithExtension);
|
||||
uvDir = await tc.extractZip(fullPathWithExtension);
|
||||
}
|
||||
} else {
|
||||
const extractedDir = await tc.extractTar(downloadPath);
|
||||
uvDir = path.join(extractedDir, artifactName);
|
||||
}
|
||||
|
||||
const cachedToolDir = await tc.cacheDir(
|
||||
uvDir,
|
||||
TOOL_CACHE_NAME,
|
||||
@@ -124,112 +139,80 @@ async function downloadVersion(
|
||||
return { cachedToolDir, version: version };
|
||||
}
|
||||
|
||||
function resolveChecksum(
|
||||
checkSum: string | undefined,
|
||||
manifestChecksum?: string,
|
||||
): string | undefined {
|
||||
return checkSum !== undefined && checkSum !== ""
|
||||
? checkSum
|
||||
: manifestChecksum;
|
||||
}
|
||||
|
||||
function getExtension(platform: Platform): string {
|
||||
return platform === "pc-windows-msvc" ? ".zip" : ".tar.gz";
|
||||
}
|
||||
|
||||
export async function resolveVersion(
|
||||
versionInput: string,
|
||||
manifestFile: string | undefined,
|
||||
githubToken: string,
|
||||
manifestUrl: string | undefined,
|
||||
resolutionStrategy: "highest" | "lowest" = "highest",
|
||||
): Promise<string> {
|
||||
core.debug(`Resolving version: ${versionInput}`);
|
||||
let version: string;
|
||||
if (manifestFile) {
|
||||
const isSimpleMinimumVersionSpecifier =
|
||||
versionInput.includes(">") && !versionInput.includes(",");
|
||||
const resolveVersionSpecifierToLatest =
|
||||
isSimpleMinimumVersionSpecifier && resolutionStrategy === "highest";
|
||||
if (resolveVersionSpecifierToLatest) {
|
||||
core.info("Found minimum version specifier, using latest version");
|
||||
}
|
||||
if (manifestUrl !== undefined) {
|
||||
version =
|
||||
versionInput === "latest"
|
||||
? await getLatestVersionInManifest(manifestFile)
|
||||
versionInput === "latest" || resolveVersionSpecifierToLatest
|
||||
? await getLatestVersionInManifest(manifestUrl)
|
||||
: versionInput;
|
||||
} else {
|
||||
version =
|
||||
versionInput === "latest"
|
||||
? await getLatestVersion(githubToken)
|
||||
versionInput === "latest" || resolveVersionSpecifierToLatest
|
||||
? await getLatestVersionFromNdjson()
|
||||
: versionInput;
|
||||
}
|
||||
if (tc.isExplicitVersion(version)) {
|
||||
core.debug(`Version ${version} is an explicit version.`);
|
||||
if (resolveVersionSpecifierToLatest) {
|
||||
if (!pep440.satisfies(version, versionInput)) {
|
||||
throw new Error(`No version found for ${versionInput}`);
|
||||
}
|
||||
}
|
||||
return version;
|
||||
}
|
||||
const availableVersions = await getAvailableVersions(githubToken);
|
||||
|
||||
const availableVersions = await getAvailableVersions(manifestUrl);
|
||||
core.debug(`Available versions: ${availableVersions}`);
|
||||
const resolvedVersion = maxSatisfying(availableVersions, version);
|
||||
const resolvedVersion =
|
||||
resolutionStrategy === "lowest"
|
||||
? minSatisfying(availableVersions, version)
|
||||
: maxSatisfying(availableVersions, version);
|
||||
|
||||
if (resolvedVersion === undefined) {
|
||||
throw new Error(`No version found for ${version}`);
|
||||
}
|
||||
|
||||
return resolvedVersion;
|
||||
}
|
||||
|
||||
async function getAvailableVersions(githubToken: string): Promise<string[]> {
|
||||
try {
|
||||
const octokit = new Octokit({
|
||||
auth: githubToken,
|
||||
});
|
||||
return await getReleaseTagNames(octokit);
|
||||
} catch (err) {
|
||||
if ((err as Error).message.includes("Bad credentials")) {
|
||||
core.info(
|
||||
"No (valid) GitHub token provided. Falling back to anonymous. Requests might be rate limited.",
|
||||
);
|
||||
const octokit = new Octokit();
|
||||
return await getReleaseTagNames(octokit);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async function getReleaseTagNames(
|
||||
octokit: InstanceType<typeof Octokit>,
|
||||
async function getAvailableVersions(
|
||||
manifestUrl: string | undefined,
|
||||
): Promise<string[]> {
|
||||
const response = await octokit.paginate(octokit.rest.repos.listReleases, {
|
||||
owner: OWNER,
|
||||
repo: REPO,
|
||||
});
|
||||
const releaseTagNames = response.map((release) => release.tag_name);
|
||||
if (releaseTagNames.length === 0) {
|
||||
throw Error(
|
||||
"Github API request failed while getting releases. Check the GitHub status page for outages. Try again later.",
|
||||
if (manifestUrl !== undefined) {
|
||||
core.info(
|
||||
`Getting available versions from manifest-file ${manifestUrl} ...`,
|
||||
);
|
||||
}
|
||||
return releaseTagNames;
|
||||
}
|
||||
|
||||
async function getLatestVersion(githubToken: string) {
|
||||
core.debug("Getting latest version...");
|
||||
const octokit = new Octokit({
|
||||
auth: githubToken,
|
||||
});
|
||||
|
||||
let latestRelease: { tag_name: string } | undefined;
|
||||
try {
|
||||
latestRelease = await getLatestRelease(octokit);
|
||||
} catch (err) {
|
||||
if ((err as Error).message.includes("Bad credentials")) {
|
||||
core.info(
|
||||
"No (valid) GitHub token provided. Falling back to anonymous. Requests might be rate limited.",
|
||||
);
|
||||
const octokit = new Octokit();
|
||||
latestRelease = await getLatestRelease(octokit);
|
||||
} else {
|
||||
core.error(
|
||||
"Github API request failed while getting latest release. Check the GitHub status page for outages. Try again later.",
|
||||
);
|
||||
throw err;
|
||||
}
|
||||
return await getAllManifestVersions(manifestUrl);
|
||||
}
|
||||
|
||||
if (!latestRelease) {
|
||||
throw new Error("Could not determine latest release.");
|
||||
}
|
||||
core.debug(`Latest version: ${latestRelease.tag_name}`);
|
||||
return latestRelease.tag_name;
|
||||
}
|
||||
|
||||
async function getLatestRelease(octokit: InstanceType<typeof Octokit>) {
|
||||
const { data: latestRelease } = await octokit.rest.repos.getLatestRelease({
|
||||
owner: OWNER,
|
||||
repo: REPO,
|
||||
});
|
||||
return latestRelease;
|
||||
core.info(`Getting available versions from ${VERSIONS_NDJSON_URL} ...`);
|
||||
return await getAllVersionsFromNdjson();
|
||||
}
|
||||
|
||||
function maxSatisfying(
|
||||
@@ -250,3 +233,24 @@ function maxSatisfying(
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function minSatisfying(
|
||||
versions: string[],
|
||||
version: string,
|
||||
): string | undefined {
|
||||
// For semver, we need to use a different approach since tc.evaluateVersions only returns max
|
||||
// Let's use semver directly for min satisfying
|
||||
const minSemver = semver.minSatisfying(versions, version);
|
||||
if (minSemver !== null) {
|
||||
core.debug(`Found a version that satisfies the semver range: ${minSemver}`);
|
||||
return minSemver;
|
||||
}
|
||||
const minPep440 = pep440.minSatisfying(versions, version);
|
||||
if (minPep440 !== null) {
|
||||
core.debug(
|
||||
`Found a version that satisfies the pep440 specifier: ${minPep440}`,
|
||||
);
|
||||
return minPep440;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
80
src/download/legacy-version-manifest.ts
Normal file
80
src/download/legacy-version-manifest.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
import * as core from "@actions/core";
|
||||
|
||||
export interface ManifestEntry {
|
||||
arch: string;
|
||||
platform: string;
|
||||
version: string;
|
||||
downloadUrl: string;
|
||||
checksum?: string;
|
||||
variant?: string;
|
||||
archiveFormat?: string;
|
||||
}
|
||||
|
||||
interface LegacyManifestEntry {
|
||||
arch: string;
|
||||
platform: string;
|
||||
version: string;
|
||||
downloadUrl: string;
|
||||
checksum?: string;
|
||||
}
|
||||
|
||||
const warnedLegacyManifestUrls = new Set<string>();
|
||||
|
||||
export function parseLegacyManifestEntries(
|
||||
parsedEntries: unknown[],
|
||||
manifestUrl: string,
|
||||
): ManifestEntry[] {
|
||||
warnAboutLegacyManifestFormat(manifestUrl);
|
||||
|
||||
return parsedEntries.map((entry, index) => {
|
||||
if (!isLegacyManifestEntry(entry)) {
|
||||
throw new Error(
|
||||
`Invalid legacy manifest-file entry at index ${index} in ${manifestUrl}.`,
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
arch: entry.arch,
|
||||
checksum: entry.checksum,
|
||||
downloadUrl: entry.downloadUrl,
|
||||
platform: entry.platform,
|
||||
version: entry.version,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
export function clearLegacyManifestWarnings(): void {
|
||||
warnedLegacyManifestUrls.clear();
|
||||
}
|
||||
|
||||
function warnAboutLegacyManifestFormat(manifestUrl: string): void {
|
||||
if (warnedLegacyManifestUrls.has(manifestUrl)) {
|
||||
return;
|
||||
}
|
||||
|
||||
warnedLegacyManifestUrls.add(manifestUrl);
|
||||
core.warning(
|
||||
`manifest-file ${manifestUrl} uses the legacy JSON array format, which is deprecated. Please migrate to the astral-sh/versions NDJSON format before the next major release.`,
|
||||
);
|
||||
}
|
||||
|
||||
function isLegacyManifestEntry(value: unknown): value is LegacyManifestEntry {
|
||||
if (!isRecord(value)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const checksumIsValid =
|
||||
typeof value.checksum === "string" || value.checksum === undefined;
|
||||
|
||||
return (
|
||||
typeof value.arch === "string" &&
|
||||
checksumIsValid &&
|
||||
typeof value.downloadUrl === "string" &&
|
||||
typeof value.platform === "string" &&
|
||||
typeof value.version === "string"
|
||||
);
|
||||
}
|
||||
|
||||
function isRecord(value: unknown): value is Record<string, unknown> {
|
||||
return typeof value === "object" && value !== null;
|
||||
}
|
||||
39
src/download/variant-selection.ts
Normal file
39
src/download/variant-selection.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
interface VariantAwareEntry {
|
||||
variant?: string;
|
||||
}
|
||||
|
||||
export function selectDefaultVariant<T extends VariantAwareEntry>(
|
||||
entries: T[],
|
||||
duplicateEntryDescription: string,
|
||||
): T {
|
||||
const firstEntry = entries[0];
|
||||
if (firstEntry === undefined) {
|
||||
throw new Error("selectDefaultVariant requires at least one candidate.");
|
||||
}
|
||||
|
||||
if (entries.length === 1) {
|
||||
return firstEntry;
|
||||
}
|
||||
|
||||
const defaultEntries = entries.filter((entry) =>
|
||||
isDefaultVariant(entry.variant),
|
||||
);
|
||||
if (defaultEntries.length === 1) {
|
||||
return defaultEntries[0];
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
`${duplicateEntryDescription} with variants ${formatVariants(entries)}. setup-uv currently requires a single default variant for duplicate platform entries.`,
|
||||
);
|
||||
}
|
||||
|
||||
function isDefaultVariant(variant: string | undefined): boolean {
|
||||
return variant === undefined || variant === "default";
|
||||
}
|
||||
|
||||
function formatVariants<T extends VariantAwareEntry>(entries: T[]): string {
|
||||
return entries
|
||||
.map((entry) => entry.variant ?? "default")
|
||||
.sort((left, right) => left.localeCompare(right))
|
||||
.join(", ");
|
||||
}
|
||||
@@ -1,91 +1,169 @@
|
||||
import { promises as fs } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import * as core from "@actions/core";
|
||||
import * as semver from "semver";
|
||||
import { fetch } from "../utils/fetch";
|
||||
import {
|
||||
clearLegacyManifestWarnings,
|
||||
type ManifestEntry,
|
||||
parseLegacyManifestEntries,
|
||||
} from "./legacy-version-manifest";
|
||||
import { selectDefaultVariant } from "./variant-selection";
|
||||
import { type NdjsonVersion, parseVersionData } from "./versions-client";
|
||||
|
||||
const localManifestFile = join(__dirname, "..", "..", "version-manifest.json");
|
||||
|
||||
interface ManifestEntry {
|
||||
version: string;
|
||||
artifactName: string;
|
||||
arch: string;
|
||||
platform: string;
|
||||
export interface ManifestArtifact {
|
||||
downloadUrl: string;
|
||||
checksum?: string;
|
||||
archiveFormat?: string;
|
||||
}
|
||||
|
||||
const cachedManifestEntries = new Map<string, ManifestEntry[]>();
|
||||
|
||||
export async function getLatestKnownVersion(
|
||||
manifestUrl: string | undefined,
|
||||
manifestUrl: string,
|
||||
): Promise<string> {
|
||||
const manifestEntries = await getManifestEntries(manifestUrl);
|
||||
return manifestEntries.reduce((a, b) =>
|
||||
semver.gt(a.version, b.version) ? a : b,
|
||||
).version;
|
||||
const versions = await getAllVersions(manifestUrl);
|
||||
const latestVersion = versions.reduce((latest, current) =>
|
||||
semver.gt(current, latest) ? current : latest,
|
||||
);
|
||||
|
||||
return latestVersion;
|
||||
}
|
||||
|
||||
export async function getDownloadUrl(
|
||||
manifestUrl: string | undefined,
|
||||
export async function getAllVersions(manifestUrl: string): Promise<string[]> {
|
||||
const manifestEntries = await getManifestEntries(manifestUrl);
|
||||
return [...new Set(manifestEntries.map((entry) => entry.version))];
|
||||
}
|
||||
|
||||
export async function getManifestArtifact(
|
||||
manifestUrl: string,
|
||||
version: string,
|
||||
arch: string,
|
||||
platform: string,
|
||||
): Promise<string | undefined> {
|
||||
): Promise<ManifestArtifact | undefined> {
|
||||
const manifestEntries = await getManifestEntries(manifestUrl);
|
||||
const entry = manifestEntries.find(
|
||||
(entry) =>
|
||||
entry.version === version &&
|
||||
entry.arch === arch &&
|
||||
entry.platform === platform,
|
||||
const entry = selectManifestEntry(
|
||||
manifestEntries,
|
||||
manifestUrl,
|
||||
version,
|
||||
arch,
|
||||
platform,
|
||||
);
|
||||
return entry ? entry.downloadUrl : undefined;
|
||||
|
||||
if (!entry) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return {
|
||||
archiveFormat: entry.archiveFormat,
|
||||
checksum: entry.checksum,
|
||||
downloadUrl: entry.downloadUrl,
|
||||
};
|
||||
}
|
||||
|
||||
export function clearManifestCache(): void {
|
||||
cachedManifestEntries.clear();
|
||||
clearLegacyManifestWarnings();
|
||||
}
|
||||
|
||||
async function getManifestEntries(
|
||||
manifestUrl: string | undefined,
|
||||
): Promise<ManifestEntry[]> {
|
||||
let data: string;
|
||||
if (manifestUrl !== undefined) {
|
||||
core.info(`Fetching manifest-file from: ${manifestUrl}`);
|
||||
const response = await fetch(manifestUrl, {});
|
||||
if (!response.ok) {
|
||||
throw new Error(
|
||||
`Failed to fetch manifest-file: ${response.status} ${response.statusText}`,
|
||||
);
|
||||
}
|
||||
data = await response.text();
|
||||
} else {
|
||||
core.info("manifest-file not provided, reading from local file.");
|
||||
const fileContent = await fs.readFile(localManifestFile);
|
||||
data = fileContent.toString();
|
||||
}
|
||||
|
||||
return JSON.parse(data);
|
||||
}
|
||||
|
||||
export async function updateVersionManifest(
|
||||
manifestUrl: string,
|
||||
downloadUrls: string[],
|
||||
): Promise<void> {
|
||||
const manifest: ManifestEntry[] = [];
|
||||
|
||||
for (const downloadUrl of downloadUrls) {
|
||||
const urlParts = downloadUrl.split("/");
|
||||
const version = urlParts[urlParts.length - 2];
|
||||
const artifactName = urlParts[urlParts.length - 1];
|
||||
if (!artifactName.startsWith("uv-")) {
|
||||
continue;
|
||||
}
|
||||
if (artifactName.startsWith("uv-installer")) {
|
||||
continue;
|
||||
}
|
||||
const artifactParts = artifactName.split(".")[0].split("-");
|
||||
manifest.push({
|
||||
arch: artifactParts[1],
|
||||
artifactName: artifactName,
|
||||
downloadUrl: downloadUrl,
|
||||
platform: artifactName.split(`uv-${artifactParts[1]}-`)[1].split(".")[0],
|
||||
version: version,
|
||||
});
|
||||
): Promise<ManifestEntry[]> {
|
||||
const cachedEntries = cachedManifestEntries.get(manifestUrl);
|
||||
if (cachedEntries !== undefined) {
|
||||
core.debug(`Using cached manifest-file from: ${manifestUrl}`);
|
||||
return cachedEntries;
|
||||
}
|
||||
|
||||
core.info(`Fetching manifest-file from: ${manifestUrl}`);
|
||||
const response = await fetch(manifestUrl, {});
|
||||
if (!response.ok) {
|
||||
throw new Error(
|
||||
`Failed to fetch manifest-file: ${response.status} ${response.statusText}`,
|
||||
);
|
||||
}
|
||||
|
||||
const data = await response.text();
|
||||
const parsedEntries = parseManifestEntries(data, manifestUrl);
|
||||
cachedManifestEntries.set(manifestUrl, parsedEntries);
|
||||
|
||||
return parsedEntries;
|
||||
}
|
||||
|
||||
function parseManifestEntries(
|
||||
data: string,
|
||||
manifestUrl: string,
|
||||
): ManifestEntry[] {
|
||||
const trimmed = data.trim();
|
||||
if (trimmed === "") {
|
||||
throw new Error(`manifest-file at ${manifestUrl} is empty.`);
|
||||
}
|
||||
|
||||
const parsedAsJson = tryParseJson(trimmed);
|
||||
if (Array.isArray(parsedAsJson)) {
|
||||
return parseLegacyManifestEntries(parsedAsJson, manifestUrl);
|
||||
}
|
||||
|
||||
const versions = parseVersionData(trimmed, manifestUrl);
|
||||
return mapNdjsonVersionsToManifestEntries(versions, manifestUrl);
|
||||
}
|
||||
|
||||
function mapNdjsonVersionsToManifestEntries(
|
||||
versions: NdjsonVersion[],
|
||||
manifestUrl: string,
|
||||
): ManifestEntry[] {
|
||||
const manifestEntries: ManifestEntry[] = [];
|
||||
|
||||
for (const versionData of versions) {
|
||||
for (const artifact of versionData.artifacts) {
|
||||
const [arch, ...platformParts] = artifact.platform.split("-");
|
||||
if (arch === undefined || platformParts.length === 0) {
|
||||
throw new Error(
|
||||
`Invalid artifact platform '${artifact.platform}' in manifest-file ${manifestUrl}.`,
|
||||
);
|
||||
}
|
||||
|
||||
manifestEntries.push({
|
||||
arch,
|
||||
archiveFormat: artifact.archive_format,
|
||||
checksum: artifact.sha256,
|
||||
downloadUrl: artifact.url,
|
||||
platform: platformParts.join("-"),
|
||||
variant: artifact.variant,
|
||||
version: versionData.version,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return manifestEntries;
|
||||
}
|
||||
|
||||
function selectManifestEntry(
|
||||
manifestEntries: ManifestEntry[],
|
||||
manifestUrl: string,
|
||||
version: string,
|
||||
arch: string,
|
||||
platform: string,
|
||||
): ManifestEntry | undefined {
|
||||
const matches = manifestEntries.filter(
|
||||
(candidate) =>
|
||||
candidate.version === version &&
|
||||
candidate.arch === arch &&
|
||||
candidate.platform === platform,
|
||||
);
|
||||
|
||||
if (matches.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return selectDefaultVariant(
|
||||
matches,
|
||||
`manifest-file ${manifestUrl} contains multiple artifacts for version ${version}, arch ${arch}, platform ${platform}`,
|
||||
);
|
||||
}
|
||||
|
||||
function tryParseJson(value: string): unknown {
|
||||
try {
|
||||
return JSON.parse(value);
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
core.debug(`Updating manifest-file: ${JSON.stringify(manifest)}`);
|
||||
await fs.writeFile(manifestUrl, JSON.stringify(manifest));
|
||||
}
|
||||
|
||||
191
src/download/versions-client.ts
Normal file
191
src/download/versions-client.ts
Normal file
@@ -0,0 +1,191 @@
|
||||
import * as core from "@actions/core";
|
||||
import { VERSIONS_NDJSON_URL } from "../utils/constants";
|
||||
import { fetch } from "../utils/fetch";
|
||||
import { selectDefaultVariant } from "./variant-selection";
|
||||
|
||||
export interface NdjsonArtifact {
|
||||
platform: string;
|
||||
variant?: string;
|
||||
url: string;
|
||||
archive_format: string;
|
||||
sha256: string;
|
||||
}
|
||||
|
||||
export interface NdjsonVersion {
|
||||
version: string;
|
||||
artifacts: NdjsonArtifact[];
|
||||
}
|
||||
|
||||
export interface ArtifactResult {
|
||||
url: string;
|
||||
sha256: string;
|
||||
archiveFormat: string;
|
||||
}
|
||||
|
||||
const cachedVersionData = new Map<string, NdjsonVersion[]>();
|
||||
|
||||
export async function fetchVersionData(
|
||||
url: string = VERSIONS_NDJSON_URL,
|
||||
): Promise<NdjsonVersion[]> {
|
||||
const cachedVersions = cachedVersionData.get(url);
|
||||
if (cachedVersions !== undefined) {
|
||||
core.debug(`Using cached NDJSON version data from ${url}`);
|
||||
return cachedVersions;
|
||||
}
|
||||
|
||||
core.info(`Fetching version data from ${url} ...`);
|
||||
const response = await fetch(url, {});
|
||||
if (!response.ok) {
|
||||
throw new Error(
|
||||
`Failed to fetch version data: ${response.status} ${response.statusText}`,
|
||||
);
|
||||
}
|
||||
|
||||
const body = await response.text();
|
||||
const versions = parseVersionData(body, url);
|
||||
cachedVersionData.set(url, versions);
|
||||
return versions;
|
||||
}
|
||||
|
||||
export function parseVersionData(
|
||||
data: string,
|
||||
sourceDescription: string,
|
||||
): NdjsonVersion[] {
|
||||
const versions: NdjsonVersion[] = [];
|
||||
|
||||
for (const [index, line] of data.split("\n").entries()) {
|
||||
const trimmed = line.trim();
|
||||
if (trimmed === "") {
|
||||
continue;
|
||||
}
|
||||
|
||||
let parsed: unknown;
|
||||
try {
|
||||
parsed = JSON.parse(trimmed);
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`Failed to parse version data from ${sourceDescription} at line ${index + 1}: ${(error as Error).message}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (!isNdjsonVersion(parsed)) {
|
||||
throw new Error(
|
||||
`Invalid NDJSON record in ${sourceDescription} at line ${index + 1}.`,
|
||||
);
|
||||
}
|
||||
|
||||
versions.push(parsed);
|
||||
}
|
||||
|
||||
if (versions.length === 0) {
|
||||
throw new Error(`No version data found in ${sourceDescription}.`);
|
||||
}
|
||||
|
||||
return versions;
|
||||
}
|
||||
|
||||
export async function getLatestVersion(): Promise<string> {
|
||||
const versions = await fetchVersionData();
|
||||
const latestVersion = versions[0]?.version;
|
||||
if (!latestVersion) {
|
||||
throw new Error("No versions found in NDJSON data");
|
||||
}
|
||||
|
||||
core.debug(`Latest version from NDJSON: ${latestVersion}`);
|
||||
return latestVersion;
|
||||
}
|
||||
|
||||
export async function getAllVersions(): Promise<string[]> {
|
||||
const versions = await fetchVersionData();
|
||||
return versions.map((versionData) => versionData.version);
|
||||
}
|
||||
|
||||
export async function getArtifact(
|
||||
version: string,
|
||||
arch: string,
|
||||
platform: string,
|
||||
): Promise<ArtifactResult | undefined> {
|
||||
const versions = await fetchVersionData();
|
||||
const versionData = versions.find(
|
||||
(candidate) => candidate.version === version,
|
||||
);
|
||||
if (!versionData) {
|
||||
core.debug(`Version ${version} not found in NDJSON data`);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const targetPlatform = `${arch}-${platform}`;
|
||||
const matchingArtifacts = versionData.artifacts.filter(
|
||||
(candidate) => candidate.platform === targetPlatform,
|
||||
);
|
||||
|
||||
if (matchingArtifacts.length === 0) {
|
||||
core.debug(
|
||||
`Artifact for ${targetPlatform} not found in version ${version}. Available platforms: ${versionData.artifacts
|
||||
.map((candidate) => candidate.platform)
|
||||
.join(", ")}`,
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const artifact = selectArtifact(matchingArtifacts, version, targetPlatform);
|
||||
|
||||
return {
|
||||
archiveFormat: artifact.archive_format,
|
||||
sha256: artifact.sha256,
|
||||
url: artifact.url,
|
||||
};
|
||||
}
|
||||
|
||||
export function clearCache(url?: string): void {
|
||||
if (url === undefined) {
|
||||
cachedVersionData.clear();
|
||||
return;
|
||||
}
|
||||
|
||||
cachedVersionData.delete(url);
|
||||
}
|
||||
|
||||
function selectArtifact(
|
||||
artifacts: NdjsonArtifact[],
|
||||
version: string,
|
||||
targetPlatform: string,
|
||||
): NdjsonArtifact {
|
||||
return selectDefaultVariant(
|
||||
artifacts,
|
||||
`Multiple artifacts found for ${targetPlatform} in version ${version}`,
|
||||
);
|
||||
}
|
||||
|
||||
function isNdjsonVersion(value: unknown): value is NdjsonVersion {
|
||||
if (!isRecord(value)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (typeof value.version !== "string" || !Array.isArray(value.artifacts)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return value.artifacts.every(isNdjsonArtifact);
|
||||
}
|
||||
|
||||
function isNdjsonArtifact(value: unknown): value is NdjsonArtifact {
|
||||
if (!isRecord(value)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const variantIsValid =
|
||||
typeof value.variant === "string" || value.variant === undefined;
|
||||
|
||||
return (
|
||||
typeof value.archive_format === "string" &&
|
||||
typeof value.platform === "string" &&
|
||||
typeof value.sha256 === "string" &&
|
||||
typeof value.url === "string" &&
|
||||
variantIsValid
|
||||
);
|
||||
}
|
||||
|
||||
function isRecord(value: unknown): value is Record<string, unknown> {
|
||||
return typeof value === "object" && value !== null;
|
||||
}
|
||||
@@ -2,21 +2,34 @@ import * as fs from "node:fs";
|
||||
import * as cache from "@actions/cache";
|
||||
import * as core from "@actions/core";
|
||||
import * as exec from "@actions/exec";
|
||||
import * as pep440 from "@renovatebot/pep440";
|
||||
import {
|
||||
STATE_CACHE_KEY,
|
||||
STATE_CACHE_MATCHED_KEY,
|
||||
STATE_PYTHON_CACHE_MATCHED_KEY,
|
||||
} from "./cache/restore-cache";
|
||||
import { STATE_UV_PATH, STATE_UV_VERSION } from "./utils/constants";
|
||||
import {
|
||||
cacheLocalPath,
|
||||
cachePython,
|
||||
enableCache,
|
||||
ignoreNothingToCache,
|
||||
pythonDir,
|
||||
pruneCache as shouldPruneCache,
|
||||
saveCache as shouldSaveCache,
|
||||
} from "./utils/inputs";
|
||||
|
||||
export async function run(): Promise<void> {
|
||||
try {
|
||||
if (enableCache) {
|
||||
await saveCache();
|
||||
if (shouldSaveCache) {
|
||||
await saveCache();
|
||||
} else {
|
||||
core.info("save-cache is false. Skipping save cache step.");
|
||||
}
|
||||
// https://github.com/nodejs/node/issues/56645#issuecomment-3077594952
|
||||
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||
|
||||
// node will stay alive if any promises are not resolved,
|
||||
// which is a possibility if HTTP requests are dangling
|
||||
// due to retries or timeouts. We know that if we got here
|
||||
@@ -40,45 +53,102 @@ async function saveCache(): Promise<void> {
|
||||
}
|
||||
if (matchedKey === cacheKey) {
|
||||
core.info(`Cache hit occurred on key ${cacheKey}, not saving cache.`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (shouldPruneCache) {
|
||||
await pruneCache();
|
||||
}
|
||||
|
||||
core.info(`Saving cache path: ${cacheLocalPath}`);
|
||||
if (!fs.existsSync(cacheLocalPath) && !ignoreNothingToCache) {
|
||||
throw new Error(
|
||||
`Cache path ${cacheLocalPath} does not exist on disk. This likely indicates that there are no dependencies to cache. Consider disabling the cache input if it is not needed.`,
|
||||
);
|
||||
}
|
||||
try {
|
||||
await cache.saveCache([cacheLocalPath], cacheKey);
|
||||
core.info(`cache saved with the key: ${cacheKey}`);
|
||||
} catch (e) {
|
||||
if (
|
||||
e instanceof Error &&
|
||||
e.message ===
|
||||
"Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved."
|
||||
) {
|
||||
core.info(
|
||||
"No cacheable paths were found. Ignoring because ignore-nothing-to-save is enabled.",
|
||||
);
|
||||
} else {
|
||||
throw e;
|
||||
} else {
|
||||
if (shouldPruneCache) {
|
||||
await pruneCache();
|
||||
}
|
||||
|
||||
const actualCachePath = getUvCachePath();
|
||||
if (!fs.existsSync(actualCachePath)) {
|
||||
if (ignoreNothingToCache) {
|
||||
core.info(
|
||||
"No cacheable uv cache paths were found. Ignoring because ignore-nothing-to-cache is enabled.",
|
||||
);
|
||||
} else {
|
||||
throw new Error(
|
||||
`Cache path ${actualCachePath} does not exist on disk. This likely indicates that there are no dependencies to cache. Consider disabling the cache input if it is not needed.`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
await saveCacheToKey(
|
||||
cacheKey,
|
||||
actualCachePath,
|
||||
STATE_CACHE_MATCHED_KEY,
|
||||
"uv cache",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (cachePython) {
|
||||
if (!fs.existsSync(pythonDir)) {
|
||||
core.warning(
|
||||
`Python cache path ${pythonDir} does not exist on disk. Skipping Python cache save because no managed Python installation was found. If you want uv to install managed Python instead of using a system interpreter, set UV_PYTHON_PREFERENCE=only-managed.`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const pythonCacheKey = `${cacheKey}-python`;
|
||||
await saveCacheToKey(
|
||||
pythonCacheKey,
|
||||
pythonDir,
|
||||
STATE_PYTHON_CACHE_MATCHED_KEY,
|
||||
"Python cache",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async function pruneCache(): Promise<void> {
|
||||
const forceSupported = pep440.gte(core.getState(STATE_UV_VERSION), "0.8.24");
|
||||
|
||||
const options: exec.ExecOptions = {
|
||||
silent: !core.isDebug(),
|
||||
silent: false,
|
||||
};
|
||||
const execArgs = ["cache", "prune", "--ci"];
|
||||
if (forceSupported) {
|
||||
execArgs.push("--force");
|
||||
}
|
||||
|
||||
core.info("Pruning cache...");
|
||||
await exec.exec("uv", execArgs, options);
|
||||
const uvPath = core.getState(STATE_UV_PATH);
|
||||
await exec.exec(uvPath, execArgs, options);
|
||||
}
|
||||
|
||||
function getUvCachePath(): string {
|
||||
if (cacheLocalPath === undefined) {
|
||||
throw new Error(
|
||||
"cache-local-path is not set. Cannot save cache without a valid cache path.",
|
||||
);
|
||||
}
|
||||
if (
|
||||
process.env.UV_CACHE_DIR &&
|
||||
process.env.UV_CACHE_DIR !== cacheLocalPath.path
|
||||
) {
|
||||
core.warning(
|
||||
`The environment variable UV_CACHE_DIR has been changed to "${process.env.UV_CACHE_DIR}", by an action or step running after astral-sh/setup-uv. This can lead to unexpected behavior. If you expected this to happen set the cache-local-path input to "${process.env.UV_CACHE_DIR}" instead of "${cacheLocalPath.path}".`,
|
||||
);
|
||||
return process.env.UV_CACHE_DIR;
|
||||
}
|
||||
return cacheLocalPath.path;
|
||||
}
|
||||
|
||||
async function saveCacheToKey(
|
||||
cacheKey: string,
|
||||
cachePath: string,
|
||||
stateKey: string,
|
||||
cacheName: string,
|
||||
): Promise<void> {
|
||||
const matchedKey = core.getState(stateKey);
|
||||
|
||||
if (matchedKey === cacheKey) {
|
||||
core.info(
|
||||
`${cacheName} hit occurred on key ${cacheKey}, not saving cache.`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
core.info(`Including ${cacheName} path: ${cachePath}`);
|
||||
await cache.saveCache([cachePath], cacheKey);
|
||||
core.info(`${cacheName} saved with key: ${cacheKey}`);
|
||||
}
|
||||
|
||||
run();
|
||||
|
||||
174
src/setup-uv.ts
174
src/setup-uv.ts
@@ -4,24 +4,28 @@ import * as core from "@actions/core";
|
||||
import * as exec from "@actions/exec";
|
||||
import { restoreCache } from "./cache/restore-cache";
|
||||
import {
|
||||
downloadVersionFromGithub,
|
||||
downloadVersionFromManifest,
|
||||
downloadVersionFromNdjson,
|
||||
resolveVersion,
|
||||
tryGetFromToolCache,
|
||||
} from "./download/download-version";
|
||||
import { STATE_UV_PATH, STATE_UV_VERSION } from "./utils/constants";
|
||||
import {
|
||||
activateEnvironment as activateEnvironmentInput,
|
||||
addProblemMatchers,
|
||||
CacheLocalSource,
|
||||
cacheLocalPath,
|
||||
checkSum,
|
||||
enableCache,
|
||||
githubToken,
|
||||
ignoreEmptyWorkdir,
|
||||
manifestFile,
|
||||
pythonDir,
|
||||
pythonVersion,
|
||||
serverUrl,
|
||||
resolutionStrategy,
|
||||
toolBinDir,
|
||||
toolDir,
|
||||
venvPath,
|
||||
versionFile as versionFileInput,
|
||||
version as versionInput,
|
||||
workingDirectory,
|
||||
@@ -34,6 +38,37 @@ import {
|
||||
} from "./utils/platforms";
|
||||
import { getUvVersionFromFile } from "./version/resolve";
|
||||
|
||||
async function getPythonVersion(): Promise<string> {
|
||||
if (pythonVersion !== "") {
|
||||
return pythonVersion;
|
||||
}
|
||||
|
||||
let output = "";
|
||||
const options: exec.ExecOptions = {
|
||||
listeners: {
|
||||
stdout: (data: Buffer) => {
|
||||
output += data.toString();
|
||||
},
|
||||
},
|
||||
silent: !core.isDebug(),
|
||||
};
|
||||
|
||||
try {
|
||||
const execArgs = ["python", "find", "--directory", workingDirectory];
|
||||
await exec.exec("uv", execArgs, options);
|
||||
const pythonPath = output.trim();
|
||||
|
||||
output = "";
|
||||
await exec.exec(pythonPath, ["--version"], options);
|
||||
// output is like "Python 3.8.10"
|
||||
return output.split(" ")[1].trim();
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
core.debug(`Failed to get python version from uv. Error: ${err.message}`);
|
||||
return "unknown";
|
||||
}
|
||||
}
|
||||
|
||||
async function run(): Promise<void> {
|
||||
detectEmptyWorkdir();
|
||||
const platform = await getPlatform();
|
||||
@@ -51,17 +86,24 @@ async function run(): Promise<void> {
|
||||
addToolBinToPath();
|
||||
addUvToPathAndOutput(setupResult.uvDir);
|
||||
setToolDir();
|
||||
addPythonDirToPath();
|
||||
setupPython();
|
||||
await activateEnvironment();
|
||||
addMatchers();
|
||||
setCacheDir(cacheLocalPath);
|
||||
setCacheDir();
|
||||
|
||||
core.setOutput("uv-version", setupResult.version);
|
||||
core.saveState(STATE_UV_VERSION, setupResult.version);
|
||||
core.info(`Successfully installed uv version ${setupResult.version}`);
|
||||
|
||||
const pythonVersion = await getPythonVersion();
|
||||
core.setOutput("python-version", pythonVersion);
|
||||
|
||||
if (enableCache) {
|
||||
await restoreCache();
|
||||
await restoreCache(pythonVersion);
|
||||
}
|
||||
// https://github.com/nodejs/node/issues/56645#issuecomment-3077594952
|
||||
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||
process.exit(0);
|
||||
} catch (err) {
|
||||
core.setFailed((err as Error).message);
|
||||
@@ -69,7 +111,7 @@ async function run(): Promise<void> {
|
||||
}
|
||||
|
||||
function detectEmptyWorkdir(): void {
|
||||
if (fs.readdirSync(".").length === 0) {
|
||||
if (fs.readdirSync(workingDirectory).length === 0) {
|
||||
if (ignoreEmptyWorkdir) {
|
||||
core.info(
|
||||
"Empty workdir detected. Ignoring because ignore-empty-workdir is enabled",
|
||||
@@ -98,29 +140,23 @@ async function setupUv(
|
||||
};
|
||||
}
|
||||
|
||||
let downloadVersionResult: { version: string; cachedToolDir: string };
|
||||
if (serverUrl !== "https://github.com") {
|
||||
core.warning(
|
||||
"The input server-url is deprecated. Please use manifest-file instead.",
|
||||
);
|
||||
downloadVersionResult = await downloadVersionFromGithub(
|
||||
serverUrl,
|
||||
platform,
|
||||
arch,
|
||||
resolvedVersion,
|
||||
checkSum,
|
||||
githubToken,
|
||||
);
|
||||
} else {
|
||||
downloadVersionResult = await downloadVersionFromManifest(
|
||||
manifestFile,
|
||||
platform,
|
||||
arch,
|
||||
resolvedVersion,
|
||||
checkSum,
|
||||
githubToken,
|
||||
);
|
||||
}
|
||||
const downloadVersionResult =
|
||||
manifestFile !== undefined
|
||||
? await downloadVersionFromManifest(
|
||||
manifestFile,
|
||||
platform,
|
||||
arch,
|
||||
resolvedVersion,
|
||||
checkSum,
|
||||
githubToken,
|
||||
)
|
||||
: await downloadVersionFromNdjson(
|
||||
platform,
|
||||
arch,
|
||||
resolvedVersion,
|
||||
checkSum,
|
||||
githubToken,
|
||||
);
|
||||
|
||||
return {
|
||||
uvDir: downloadVersionResult.cachedToolDir,
|
||||
@@ -132,7 +168,7 @@ async function determineVersion(
|
||||
manifestFile: string | undefined,
|
||||
): Promise<string> {
|
||||
if (versionInput !== "") {
|
||||
return await resolveVersion(versionInput, manifestFile, githubToken);
|
||||
return await resolveVersion(versionInput, manifestFile, resolutionStrategy);
|
||||
}
|
||||
if (versionFileInput !== "") {
|
||||
const versionFromFile = getUvVersionFromFile(versionFileInput);
|
||||
@@ -141,7 +177,11 @@ async function determineVersion(
|
||||
`Could not determine uv version from file: ${versionFileInput}`,
|
||||
);
|
||||
}
|
||||
return await resolveVersion(versionFromFile, manifestFile, githubToken);
|
||||
return await resolveVersion(
|
||||
versionFromFile,
|
||||
manifestFile,
|
||||
resolutionStrategy,
|
||||
);
|
||||
}
|
||||
const versionFromUvToml = getUvVersionFromFile(
|
||||
`${workingDirectory}${path.sep}uv.toml`,
|
||||
@@ -157,24 +197,37 @@ async function determineVersion(
|
||||
return await resolveVersion(
|
||||
versionFromUvToml || versionFromPyproject || "latest",
|
||||
manifestFile,
|
||||
githubToken,
|
||||
resolutionStrategy,
|
||||
);
|
||||
}
|
||||
|
||||
function addUvToPathAndOutput(cachedPath: string): void {
|
||||
core.setOutput("uv-path", `${cachedPath}${path.sep}uv`);
|
||||
core.saveState(STATE_UV_PATH, `${cachedPath}${path.sep}uv`);
|
||||
core.setOutput("uvx-path", `${cachedPath}${path.sep}uvx`);
|
||||
core.addPath(cachedPath);
|
||||
core.info(`Added ${cachedPath} to the path`);
|
||||
if (process.env.UV_NO_MODIFY_PATH !== undefined) {
|
||||
core.info("UV_NO_MODIFY_PATH is set, not modifying PATH");
|
||||
} else {
|
||||
core.addPath(cachedPath);
|
||||
core.info(`Added ${cachedPath} to the path`);
|
||||
}
|
||||
}
|
||||
|
||||
function addToolBinToPath(): void {
|
||||
if (toolBinDir !== undefined) {
|
||||
core.exportVariable("UV_TOOL_BIN_DIR", toolBinDir);
|
||||
core.info(`Set UV_TOOL_BIN_DIR to ${toolBinDir}`);
|
||||
core.addPath(toolBinDir);
|
||||
core.info(`Added ${toolBinDir} to the path`);
|
||||
if (process.env.UV_NO_MODIFY_PATH !== undefined) {
|
||||
core.info(`UV_NO_MODIFY_PATH is set, not adding ${toolBinDir} to path`);
|
||||
} else {
|
||||
core.addPath(toolBinDir);
|
||||
core.info(`Added ${toolBinDir} to the path`);
|
||||
}
|
||||
} else {
|
||||
if (process.env.UV_NO_MODIFY_PATH !== undefined) {
|
||||
core.info("UV_NO_MODIFY_PATH is set, not adding user local bin to path");
|
||||
return;
|
||||
}
|
||||
if (process.env.XDG_BIN_HOME !== undefined) {
|
||||
core.addPath(process.env.XDG_BIN_HOME);
|
||||
core.info(`Added ${process.env.XDG_BIN_HOME} to the path`);
|
||||
@@ -195,6 +248,17 @@ function setToolDir(): void {
|
||||
}
|
||||
}
|
||||
|
||||
function addPythonDirToPath(): void {
|
||||
core.exportVariable("UV_PYTHON_INSTALL_DIR", pythonDir);
|
||||
core.info(`Set UV_PYTHON_INSTALL_DIR to ${pythonDir}`);
|
||||
if (process.env.UV_NO_MODIFY_PATH !== undefined) {
|
||||
core.info("UV_NO_MODIFY_PATH is set, not adding python dir to path");
|
||||
} else {
|
||||
core.addPath(pythonDir);
|
||||
core.info(`Added ${pythonDir} to the path`);
|
||||
}
|
||||
}
|
||||
|
||||
function setupPython(): void {
|
||||
if (pythonVersion !== "") {
|
||||
core.exportVariable("UV_PYTHON", pythonVersion);
|
||||
@@ -204,26 +268,42 @@ function setupPython(): void {
|
||||
|
||||
async function activateEnvironment(): Promise<void> {
|
||||
if (activateEnvironmentInput) {
|
||||
const execArgs = ["venv", ".venv", "--directory", workingDirectory];
|
||||
if (process.env.UV_NO_MODIFY_PATH !== undefined) {
|
||||
throw new Error(
|
||||
"UV_NO_MODIFY_PATH and activate-environment cannot be used together.",
|
||||
);
|
||||
}
|
||||
|
||||
core.info("Activating python venv...");
|
||||
await exec.exec("uv", execArgs);
|
||||
core.info(`Creating and activating python venv at ${venvPath}...`);
|
||||
await exec.exec("uv", [
|
||||
"venv",
|
||||
venvPath,
|
||||
"--directory",
|
||||
workingDirectory,
|
||||
"--clear",
|
||||
]);
|
||||
|
||||
let venvBinPath = `${workingDirectory}${path.sep}.venv${path.sep}bin`;
|
||||
let venvBinPath = `${venvPath}${path.sep}bin`;
|
||||
if (process.platform === "win32") {
|
||||
venvBinPath = `${workingDirectory}${path.sep}.venv${path.sep}Scripts`;
|
||||
venvBinPath = `${venvPath}${path.sep}Scripts`;
|
||||
}
|
||||
core.addPath(path.resolve(venvBinPath));
|
||||
core.exportVariable(
|
||||
"VIRTUAL_ENV",
|
||||
path.resolve(`${workingDirectory}${path.sep}.venv`),
|
||||
);
|
||||
core.exportVariable("VIRTUAL_ENV", venvPath);
|
||||
core.setOutput("venv", venvPath);
|
||||
}
|
||||
}
|
||||
|
||||
function setCacheDir(cacheLocalPath: string): void {
|
||||
core.exportVariable("UV_CACHE_DIR", cacheLocalPath);
|
||||
core.info(`Set UV_CACHE_DIR to ${cacheLocalPath}`);
|
||||
function setCacheDir(): void {
|
||||
if (cacheLocalPath !== undefined) {
|
||||
if (cacheLocalPath.source === CacheLocalSource.Config) {
|
||||
core.info(
|
||||
"Using cache-dir from uv config file, not modifying UV_CACHE_DIR",
|
||||
);
|
||||
return;
|
||||
}
|
||||
core.exportVariable("UV_CACHE_DIR", cacheLocalPath.path);
|
||||
core.info(`Set UV_CACHE_DIR to ${cacheLocalPath.path}`);
|
||||
}
|
||||
}
|
||||
|
||||
function addMatchers(): void {
|
||||
|
||||
81
src/update-known-checksums.ts
Normal file
81
src/update-known-checksums.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
import * as core from "@actions/core";
|
||||
import * as semver from "semver";
|
||||
import { KNOWN_CHECKSUMS } from "./download/checksum/known-checksums";
|
||||
import {
|
||||
type ChecksumEntry,
|
||||
updateChecksums,
|
||||
} from "./download/checksum/update-known-checksums";
|
||||
import {
|
||||
fetchVersionData,
|
||||
getLatestVersion,
|
||||
type NdjsonVersion,
|
||||
} from "./download/versions-client";
|
||||
|
||||
const VERSION_IN_CHECKSUM_KEY_PATTERN =
|
||||
/-(\d+\.\d+\.\d+(?:[-+][0-9A-Za-z.-]+)?)$/;
|
||||
|
||||
async function run(): Promise<void> {
|
||||
const checksumFilePath = process.argv.slice(2)[0];
|
||||
if (!checksumFilePath) {
|
||||
throw new Error(
|
||||
"Missing checksum file path. Usage: node dist/update-known-checksums/index.js <checksum-file-path>",
|
||||
);
|
||||
}
|
||||
|
||||
const latestVersion = await getLatestVersion();
|
||||
const latestKnownVersion = getLatestKnownVersionFromChecksums();
|
||||
|
||||
if (semver.lte(latestVersion, latestKnownVersion)) {
|
||||
core.info(
|
||||
`Latest release (${latestVersion}) is not newer than the latest known version (${latestKnownVersion}). Skipping update.`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const versions = await fetchVersionData();
|
||||
const checksumEntries = extractChecksumsFromNdjson(versions);
|
||||
await updateChecksums(checksumFilePath, checksumEntries);
|
||||
|
||||
core.setOutput("latest-version", latestVersion);
|
||||
}
|
||||
|
||||
function getLatestKnownVersionFromChecksums(): string {
|
||||
const versions = new Set<string>();
|
||||
|
||||
for (const key of Object.keys(KNOWN_CHECKSUMS)) {
|
||||
const version = extractVersionFromChecksumKey(key);
|
||||
if (version !== undefined) {
|
||||
versions.add(version);
|
||||
}
|
||||
}
|
||||
|
||||
const latestVersion = [...versions].sort(semver.rcompare)[0];
|
||||
if (!latestVersion) {
|
||||
throw new Error("Could not determine latest known version from checksums.");
|
||||
}
|
||||
|
||||
return latestVersion;
|
||||
}
|
||||
|
||||
function extractVersionFromChecksumKey(key: string): string | undefined {
|
||||
return key.match(VERSION_IN_CHECKSUM_KEY_PATTERN)?.[1];
|
||||
}
|
||||
|
||||
function extractChecksumsFromNdjson(
|
||||
versions: NdjsonVersion[],
|
||||
): ChecksumEntry[] {
|
||||
const checksums: ChecksumEntry[] = [];
|
||||
|
||||
for (const version of versions) {
|
||||
for (const artifact of version.artifacts) {
|
||||
checksums.push({
|
||||
checksum: artifact.sha256,
|
||||
key: `${artifact.platform}-${version.version}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return checksums;
|
||||
}
|
||||
|
||||
run();
|
||||
@@ -1,56 +0,0 @@
|
||||
import * as core from "@actions/core";
|
||||
import * as semver from "semver";
|
||||
import { updateChecksums } from "./download/checksum/update-known-checksums";
|
||||
import {
|
||||
getLatestKnownVersion,
|
||||
updateVersionManifest,
|
||||
} from "./download/version-manifest";
|
||||
import { OWNER, REPO } from "./utils/constants";
|
||||
import { Octokit } from "./utils/octokit";
|
||||
|
||||
async function run(): Promise<void> {
|
||||
const checksumFilePath = process.argv.slice(2)[0];
|
||||
const versionsManifestFile = process.argv.slice(2)[1];
|
||||
const githubToken = process.argv.slice(2)[2];
|
||||
|
||||
const octokit = new Octokit({
|
||||
auth: githubToken,
|
||||
});
|
||||
|
||||
const { data: latestRelease } = await octokit.rest.repos.getLatestRelease({
|
||||
owner: OWNER,
|
||||
repo: REPO,
|
||||
});
|
||||
|
||||
const latestKnownVersion = await getLatestKnownVersion(undefined);
|
||||
|
||||
if (semver.lte(latestRelease.tag_name, latestKnownVersion)) {
|
||||
core.info(
|
||||
`Latest release (${latestRelease.tag_name}) is not newer than the latest known version (${latestKnownVersion}). Skipping update.`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const releases = await octokit.paginate(octokit.rest.repos.listReleases, {
|
||||
owner: OWNER,
|
||||
repo: REPO,
|
||||
});
|
||||
const checksumDownloadUrls: string[] = releases.flatMap((release) =>
|
||||
release.assets
|
||||
.filter((asset) => asset.name.endsWith(".sha256"))
|
||||
.map((asset) => asset.browser_download_url),
|
||||
);
|
||||
await updateChecksums(checksumFilePath, checksumDownloadUrls);
|
||||
|
||||
const artifactDownloadUrls: string[] = releases.flatMap((release) =>
|
||||
release.assets
|
||||
.filter((asset) => !asset.name.endsWith(".sha256"))
|
||||
.map((asset) => asset.browser_download_url),
|
||||
);
|
||||
|
||||
await updateVersionManifest(versionsManifestFile, artifactDownloadUrls);
|
||||
|
||||
core.setOutput("latest-version", latestRelease.tag_name);
|
||||
}
|
||||
|
||||
run();
|
||||
24
src/utils/config-file.ts
Normal file
24
src/utils/config-file.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import fs from "node:fs";
|
||||
import * as toml from "smol-toml";
|
||||
|
||||
export function getConfigValueFromTomlFile(
|
||||
filePath: string,
|
||||
key: string,
|
||||
): string | undefined {
|
||||
if (!fs.existsSync(filePath) || !filePath.endsWith(".toml")) {
|
||||
return undefined;
|
||||
}
|
||||
const fileContent = fs.readFileSync(filePath, "utf-8");
|
||||
|
||||
if (filePath.endsWith("pyproject.toml")) {
|
||||
const tomlContent = toml.parse(fileContent) as {
|
||||
tool?: { uv?: Record<string, string | undefined> };
|
||||
};
|
||||
return tomlContent?.tool?.uv?.[key];
|
||||
}
|
||||
const tomlContent = toml.parse(fileContent) as Record<
|
||||
string,
|
||||
string | undefined
|
||||
>;
|
||||
return tomlContent[key];
|
||||
}
|
||||
@@ -1,3 +1,5 @@
|
||||
export const REPO = "uv";
|
||||
export const OWNER = "astral-sh";
|
||||
export const TOOL_CACHE_NAME = "uv";
|
||||
export const STATE_UV_PATH = "uv-path";
|
||||
export const STATE_UV_VERSION = "uv-version";
|
||||
export const VERSIONS_NDJSON_URL =
|
||||
"https://raw.githubusercontent.com/astral-sh/versions/main/v1/uv.ndjson";
|
||||
|
||||
@@ -1,28 +1,41 @@
|
||||
import path from "node:path";
|
||||
import * as core from "@actions/core";
|
||||
import { getConfigValueFromTomlFile } from "./config-file";
|
||||
|
||||
export enum CacheLocalSource {
|
||||
Input,
|
||||
Config,
|
||||
Env,
|
||||
Default,
|
||||
}
|
||||
|
||||
export const workingDirectory = core.getInput("working-directory");
|
||||
export const version = core.getInput("version");
|
||||
export const versionFile = getVersionFile();
|
||||
export const pythonVersion = core.getInput("python-version");
|
||||
export const activateEnvironment = core.getBooleanInput("activate-environment");
|
||||
export const venvPath = getVenvPath();
|
||||
export const checkSum = core.getInput("checksum");
|
||||
export const enableCache = getEnableCache();
|
||||
export const restoreCache = core.getInput("restore-cache") === "true";
|
||||
export const saveCache = core.getInput("save-cache") === "true";
|
||||
export const cacheSuffix = core.getInput("cache-suffix") || "";
|
||||
export const cacheLocalPath = getCacheLocalPath();
|
||||
export const cacheDependencyGlob = getCacheDependencyGlob();
|
||||
export const pruneCache = core.getInput("prune-cache") === "true";
|
||||
export const cachePython = core.getInput("cache-python") === "true";
|
||||
export const ignoreNothingToCache =
|
||||
core.getInput("ignore-nothing-to-cache") === "true";
|
||||
export const ignoreEmptyWorkdir =
|
||||
core.getInput("ignore-empty-workdir") === "true";
|
||||
export const toolBinDir = getToolBinDir();
|
||||
export const toolDir = getToolDir();
|
||||
export const serverUrl = core.getInput("server-url");
|
||||
export const pythonDir = getUvPythonDir();
|
||||
export const githubToken = core.getInput("github-token");
|
||||
export const manifestFile = getManifestFile();
|
||||
export const addProblemMatchers =
|
||||
core.getInput("add-problem-matchers") === "true";
|
||||
export const resolutionStrategy = getResolutionStrategy();
|
||||
|
||||
function getVersionFile(): string {
|
||||
const versionFileInput = core.getInput("version-file");
|
||||
@@ -33,6 +46,18 @@ function getVersionFile(): string {
|
||||
return versionFileInput;
|
||||
}
|
||||
|
||||
function getVenvPath(): string {
|
||||
const venvPathInput = core.getInput("venv-path");
|
||||
if (venvPathInput !== "") {
|
||||
if (!activateEnvironment) {
|
||||
core.warning("venv-path is only used when activate-environment is true");
|
||||
}
|
||||
const tildeExpanded = expandTilde(venvPathInput);
|
||||
return normalizePath(resolveRelativePath(tildeExpanded));
|
||||
}
|
||||
return normalizePath(resolveRelativePath(".venv"));
|
||||
}
|
||||
|
||||
function getEnableCache(): boolean {
|
||||
const enableCacheInput = core.getInput("enable-cache");
|
||||
if (enableCacheInput === "auto") {
|
||||
@@ -75,24 +100,91 @@ function getToolDir(): string | undefined {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function getCacheLocalPath(): string {
|
||||
function getCacheLocalPath():
|
||||
| {
|
||||
path: string;
|
||||
source: CacheLocalSource;
|
||||
}
|
||||
| undefined {
|
||||
const cacheLocalPathInput = core.getInput("cache-local-path");
|
||||
if (cacheLocalPathInput !== "") {
|
||||
const tildeExpanded = expandTilde(cacheLocalPathInput);
|
||||
return resolveRelativePath(tildeExpanded);
|
||||
return {
|
||||
path: resolveRelativePath(tildeExpanded),
|
||||
source: CacheLocalSource.Input,
|
||||
};
|
||||
}
|
||||
if (process.env.RUNNER_ENVIRONMENT === "github-hosted") {
|
||||
if (process.env.RUNNER_TEMP !== undefined) {
|
||||
return `${process.env.RUNNER_TEMP}${path.sep}setup-uv-cache`;
|
||||
const cacheDirFromConfig = getCacheDirFromConfig();
|
||||
if (cacheDirFromConfig !== undefined) {
|
||||
return { path: cacheDirFromConfig, source: CacheLocalSource.Config };
|
||||
}
|
||||
if (process.env.UV_CACHE_DIR !== undefined) {
|
||||
core.info(`UV_CACHE_DIR is already set to ${process.env.UV_CACHE_DIR}`);
|
||||
return { path: process.env.UV_CACHE_DIR, source: CacheLocalSource.Env };
|
||||
}
|
||||
if (getEnableCache()) {
|
||||
if (process.env.RUNNER_ENVIRONMENT === "github-hosted") {
|
||||
if (process.env.RUNNER_TEMP !== undefined) {
|
||||
return {
|
||||
path: `${process.env.RUNNER_TEMP}${path.sep}setup-uv-cache`,
|
||||
source: CacheLocalSource.Default,
|
||||
};
|
||||
}
|
||||
throw Error(
|
||||
"Could not determine UV_CACHE_DIR. Please make sure RUNNER_TEMP is set or provide the cache-local-path input",
|
||||
);
|
||||
}
|
||||
throw Error(
|
||||
"Could not determine UV_CACHE_DIR. Please make sure RUNNER_TEMP is set or provide the cache-local-path input",
|
||||
if (process.platform === "win32") {
|
||||
return {
|
||||
path: `${process.env.APPDATA}${path.sep}uv${path.sep}cache`,
|
||||
source: CacheLocalSource.Default,
|
||||
};
|
||||
}
|
||||
return {
|
||||
path: `${process.env.HOME}${path.sep}.cache${path.sep}uv`,
|
||||
source: CacheLocalSource.Default,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function getCacheDirFromConfig(): string | undefined {
|
||||
for (const filePath of [versionFile, "uv.toml", "pyproject.toml"]) {
|
||||
const resolvedPath = resolveRelativePath(filePath);
|
||||
try {
|
||||
const cacheDir = getConfigValueFromTomlFile(resolvedPath, "cache-dir");
|
||||
if (cacheDir !== undefined) {
|
||||
core.info(`Found cache-dir in ${resolvedPath}: ${cacheDir}`);
|
||||
return cacheDir;
|
||||
}
|
||||
} catch (err) {
|
||||
const message = (err as Error).message;
|
||||
core.warning(`Error while parsing ${filePath}: ${message}`);
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export function getUvPythonDir(): string {
|
||||
if (process.env.UV_PYTHON_INSTALL_DIR !== undefined) {
|
||||
core.info(
|
||||
`UV_PYTHON_INSTALL_DIR is already set to ${process.env.UV_PYTHON_INSTALL_DIR}`,
|
||||
);
|
||||
return process.env.UV_PYTHON_INSTALL_DIR;
|
||||
}
|
||||
if (process.platform === "win32") {
|
||||
return `${process.env.APPDATA}${path.sep}uv${path.sep}cache`;
|
||||
if (process.env.RUNNER_ENVIRONMENT !== "github-hosted") {
|
||||
if (process.platform === "win32") {
|
||||
return `${process.env.APPDATA}${path.sep}uv${path.sep}python`;
|
||||
} else {
|
||||
return `${process.env.HOME}${path.sep}.local${path.sep}share${path.sep}uv${path.sep}python`;
|
||||
}
|
||||
}
|
||||
return `${process.env.HOME}${path.sep}.cache${path.sep}uv`;
|
||||
if (process.env.RUNNER_TEMP !== undefined) {
|
||||
return `${process.env.RUNNER_TEMP}${path.sep}uv-python-dir`;
|
||||
}
|
||||
throw Error(
|
||||
"Could not determine UV_PYTHON_INSTALL_DIR. Please make sure RUNNER_TEMP is set or provide the UV_PYTHON_INSTALL_DIR environment variable",
|
||||
);
|
||||
}
|
||||
|
||||
function getCacheDependencyGlob(): string {
|
||||
@@ -115,17 +207,29 @@ function expandTilde(input: string): string {
|
||||
return input;
|
||||
}
|
||||
|
||||
function normalizePath(inputPath: string): string {
|
||||
const normalized = path.normalize(inputPath);
|
||||
const root = path.parse(normalized).root;
|
||||
|
||||
// Remove any trailing path separators, except when the whole path is the root.
|
||||
let trimmed = normalized;
|
||||
while (trimmed.length > root.length && trimmed.endsWith(path.sep)) {
|
||||
trimmed = trimmed.slice(0, -1);
|
||||
}
|
||||
|
||||
return trimmed;
|
||||
}
|
||||
|
||||
function resolveRelativePath(inputPath: string): string {
|
||||
if (path.isAbsolute(inputPath)) {
|
||||
return inputPath;
|
||||
}
|
||||
let absolutePath = inputPath;
|
||||
if (absolutePath.startsWith("./")) {
|
||||
absolutePath = absolutePath.substring(2);
|
||||
}
|
||||
absolutePath = `${workingDirectory}${path.sep}${absolutePath}`;
|
||||
core.debug(`Resolving relative path ${inputPath} to ${absolutePath}`);
|
||||
return absolutePath;
|
||||
const hasNegation = inputPath.startsWith("!");
|
||||
const pathWithoutNegation = hasNegation ? inputPath.substring(1) : inputPath;
|
||||
|
||||
const resolvedPath = path.resolve(workingDirectory, pathWithoutNegation);
|
||||
|
||||
core.debug(
|
||||
`Resolving relative path ${inputPath} to ${hasNegation ? "!" : ""}${resolvedPath}`,
|
||||
);
|
||||
return hasNegation ? `!${resolvedPath}` : resolvedPath;
|
||||
}
|
||||
|
||||
function getManifestFile(): string | undefined {
|
||||
@@ -135,3 +239,16 @@ function getManifestFile(): string | undefined {
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function getResolutionStrategy(): "highest" | "lowest" {
|
||||
const resolutionStrategyInput = core.getInput("resolution-strategy");
|
||||
if (resolutionStrategyInput === "lowest") {
|
||||
return "lowest";
|
||||
}
|
||||
if (resolutionStrategyInput === "highest" || resolutionStrategyInput === "") {
|
||||
return "highest";
|
||||
}
|
||||
throw new Error(
|
||||
`Invalid resolution-strategy: ${resolutionStrategyInput}. Must be 'highest' or 'lowest'.`,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
import { Octokit as Core } from "@octokit/core";
|
||||
import type {
|
||||
Constructor,
|
||||
OctokitOptions,
|
||||
} from "@octokit/core/dist-types/types";
|
||||
import {
|
||||
type PaginateInterface,
|
||||
paginateRest,
|
||||
} from "@octokit/plugin-paginate-rest";
|
||||
import { legacyRestEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";
|
||||
import { fetch as customFetch } from "./fetch";
|
||||
|
||||
export type { RestEndpointMethodTypes } from "@octokit/plugin-rest-endpoint-methods";
|
||||
|
||||
const DEFAULTS = {
|
||||
baseUrl: "https://api.github.com",
|
||||
userAgent: "setup-uv",
|
||||
};
|
||||
|
||||
export const Octokit: typeof Core &
|
||||
Constructor<
|
||||
{
|
||||
paginate: PaginateInterface;
|
||||
} & ReturnType<typeof legacyRestEndpointMethods>
|
||||
> = Core.plugin(paginateRest, legacyRestEndpointMethods).defaults(
|
||||
function buildDefaults(options: OctokitOptions): OctokitOptions {
|
||||
return {
|
||||
...DEFAULTS,
|
||||
...options,
|
||||
request: {
|
||||
fetch: customFetch,
|
||||
...options.request,
|
||||
},
|
||||
};
|
||||
},
|
||||
);
|
||||
|
||||
export type Octokit = InstanceType<typeof Octokit>;
|
||||
@@ -1,3 +1,5 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import * as core from "@actions/core";
|
||||
import * as exec from "@actions/exec";
|
||||
export type Platform =
|
||||
@@ -11,6 +13,7 @@ export type Architecture =
|
||||
| "x86_64"
|
||||
| "aarch64"
|
||||
| "s390x"
|
||||
| "riscv64gc"
|
||||
| "powerpc64le";
|
||||
|
||||
export function getArch(): Architecture | undefined {
|
||||
@@ -19,6 +22,7 @@ export function getArch(): Architecture | undefined {
|
||||
arm64: "aarch64",
|
||||
ia32: "i686",
|
||||
ppc64: "powerpc64le",
|
||||
riscv64: "riscv64gc",
|
||||
s390x: "s390x",
|
||||
x64: "x86_64",
|
||||
};
|
||||
@@ -74,3 +78,77 @@ async function isMuslOs(): Promise<boolean> {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns OS name and version for cache key differentiation.
|
||||
* Examples: "ubuntu-22.04", "macos-14", "windows-2022"
|
||||
* Throws if OS detection fails.
|
||||
*/
|
||||
export function getOSNameVersion(): string {
|
||||
const platform = process.platform;
|
||||
|
||||
if (platform === "linux") {
|
||||
return getLinuxOSNameVersion();
|
||||
}
|
||||
if (platform === "darwin") {
|
||||
return getMacOSNameVersion();
|
||||
}
|
||||
if (platform === "win32") {
|
||||
return getWindowsNameVersion();
|
||||
}
|
||||
|
||||
throw new Error(`Unsupported platform: ${platform}`);
|
||||
}
|
||||
|
||||
function getLinuxOSNameVersion(): string {
|
||||
const files = ["/etc/os-release", "/usr/lib/os-release"];
|
||||
|
||||
for (const file of files) {
|
||||
try {
|
||||
const content = fs.readFileSync(file, "utf8");
|
||||
const id = parseOsReleaseValue(content, "ID");
|
||||
const versionId = parseOsReleaseValue(content, "VERSION_ID");
|
||||
// Fallback for rolling releases (debian:unstable/testing, arch, etc.)
|
||||
// that don't have VERSION_ID but have VERSION_CODENAME
|
||||
const versionCodename = parseOsReleaseValue(content, "VERSION_CODENAME");
|
||||
|
||||
if (id && versionId) {
|
||||
return `${id}-${versionId}`;
|
||||
}
|
||||
if (id && versionCodename) {
|
||||
return `${id}-${versionCodename}`;
|
||||
}
|
||||
} catch {
|
||||
// Try next file
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
"Failed to determine Linux distribution. " +
|
||||
"Could not read /etc/os-release or /usr/lib/os-release",
|
||||
);
|
||||
}
|
||||
|
||||
function parseOsReleaseValue(content: string, key: string): string | undefined {
|
||||
const regex = new RegExp(`^${key}=["']?([^"'\\n]*)["']?$`, "m");
|
||||
const match = content.match(regex);
|
||||
return match?.[1];
|
||||
}
|
||||
|
||||
function getMacOSNameVersion(): string {
|
||||
const darwinVersion = Number.parseInt(os.release().split(".")[0], 10);
|
||||
if (Number.isNaN(darwinVersion)) {
|
||||
throw new Error(`Failed to parse macOS version from: ${os.release()}`);
|
||||
}
|
||||
const macosVersion = darwinVersion - 9;
|
||||
return `macos-${macosVersion}`;
|
||||
}
|
||||
|
||||
function getWindowsNameVersion(): string {
|
||||
const version = os.version();
|
||||
const match = version.match(/Windows(?: Server)? (\d+)/);
|
||||
if (!match) {
|
||||
throw new Error(`Failed to parse Windows version from: ${version}`);
|
||||
}
|
||||
return `windows-${match[1]}`;
|
||||
}
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
import * as toml from "smol-toml";
|
||||
|
||||
export function getRequiredVersionFromConfigFile(
|
||||
filePath: string,
|
||||
): string | undefined {
|
||||
if (!filePath.endsWith(".toml")) {
|
||||
return undefined;
|
||||
}
|
||||
const fileContent = fs.readFileSync(filePath, "utf-8");
|
||||
|
||||
if (filePath.endsWith("pyproject.toml")) {
|
||||
const tomlContent = toml.parse(fileContent) as {
|
||||
tool?: { uv?: { "required-version"?: string } };
|
||||
};
|
||||
return tomlContent?.tool?.uv?.["required-version"];
|
||||
}
|
||||
const tomlContent = toml.parse(fileContent) as {
|
||||
"required-version"?: string;
|
||||
};
|
||||
return tomlContent["required-version"];
|
||||
}
|
||||
@@ -1,7 +1,8 @@
|
||||
import fs from "node:fs";
|
||||
import * as core from "@actions/core";
|
||||
import { getRequiredVersionFromConfigFile } from "./config-file";
|
||||
import { getConfigValueFromTomlFile } from "../utils/config-file";
|
||||
import { getUvVersionFromRequirementsFile } from "./requirements-file";
|
||||
import { getUvVersionFromToolVersions } from "./tool-versions-file";
|
||||
|
||||
export function getUvVersionFromFile(filePath: string): string | undefined {
|
||||
core.info(`Trying to find version for uv in: ${filePath}`);
|
||||
@@ -11,7 +12,10 @@ export function getUvVersionFromFile(filePath: string): string | undefined {
|
||||
}
|
||||
let uvVersion: string | undefined;
|
||||
try {
|
||||
uvVersion = getRequiredVersionFromConfigFile(filePath);
|
||||
uvVersion = getUvVersionFromToolVersions(filePath);
|
||||
if (uvVersion === undefined) {
|
||||
uvVersion = getConfigValueFromTomlFile(filePath, "required-version");
|
||||
}
|
||||
if (uvVersion === undefined) {
|
||||
uvVersion = getUvVersionFromRequirementsFile(filePath);
|
||||
}
|
||||
|
||||
31
src/version/tool-versions-file.ts
Normal file
31
src/version/tool-versions-file.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import fs from "node:fs";
|
||||
import * as core from "@actions/core";
|
||||
|
||||
export function getUvVersionFromToolVersions(
|
||||
filePath: string,
|
||||
): string | undefined {
|
||||
if (!filePath.endsWith(".tool-versions")) {
|
||||
return undefined;
|
||||
}
|
||||
const fileContents = fs.readFileSync(filePath, "utf8");
|
||||
const lines = fileContents.split("\n");
|
||||
|
||||
for (const line of lines) {
|
||||
// Skip commented lines
|
||||
if (line.trim().startsWith("#")) {
|
||||
continue;
|
||||
}
|
||||
const match = line.match(/^\s*uv\s*v?\s*(?<version>[^\s]+)\s*$/);
|
||||
if (match) {
|
||||
const matchedVersion = match.groups?.version.trim();
|
||||
if (matchedVersion?.startsWith("ref")) {
|
||||
core.warning(
|
||||
"The ref syntax of .tool-versions is not supported. Please use a released version instead.",
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
return matchedVersion;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
|
||||
"module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */,
|
||||
"module": "nodenext" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */,
|
||||
"noImplicitAny": true /* Raise error on expressions and declarations with an implied 'any' type. */,
|
||||
"outDir": "./lib" /* Redirect output structure to the directory. */,
|
||||
"rootDir": "./src" /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */,
|
||||
|
||||
23998
version-manifest.json
23998
version-manifest.json
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user