4 changes: 2 additions & 2 deletions sandboxes/base/Dockerfile
@@ -58,7 +58,7 @@ RUN curl -fsSL https://deb.nodesource.com/setup_22.x | bash - && \
apt-get install -y --no-install-recommends \
build-essential \
git \
nodejs=22.22.1-1nodesource1 \
nodejs \
vim-tiny \
nano \
&& rm -rf /var/lib/apt/lists/* \
@@ -74,7 +74,7 @@ RUN npm install -g \
tar@7.5.11 \
@hono/node-server@1.19.11 \
opencode-ai@1.2.18 \
@openai/codex@0.111.0 \
@openai/codex@0.117.0 \
@github/copilot@1.0.9

# GitHub CLI
10 changes: 10 additions & 0 deletions sandboxes/base/README.md
@@ -56,3 +56,13 @@ FROM ${BASE_IMAGE}
```

See `sandboxes/openclaw/` for an example.

## Codex authentication

In remote or headless OpenShell environments, the browser-based login flow can hang. If it does, authenticate Codex with the device-code flow instead:

```bash
codex login --device-auth
```

If device-code login is unreliable in your environment, you can authenticate on another machine and copy `~/.codex/auth.json` into the sandbox.
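
For example, if the sandbox runs as a Docker container, a copy along these lines works (the container name `my-sandbox` is illustrative, and the sandbox user's home is assumed to be `/sandbox`; adjust to your setup):

```bash
# Ensure the target directory exists, then copy the credential in.
# "my-sandbox" is a hypothetical container name.
docker exec my-sandbox mkdir -p /sandbox/.codex
docker cp ~/.codex/auth.json my-sandbox:/sandbox/.codex/auth.json
```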
11 changes: 11 additions & 0 deletions sandboxes/base/policy.yaml
@@ -172,3 +172,14 @@ network_policies:
- { host: default.exp-tas.com, port: 443 }
binaries:
- { path: /usr/lib/node_modules/@github/copilot/node_modules/@github/**/copilot }

codex:
name: codex
endpoints:
- { host: api.openai.com, port: 443 }
- { host: auth.openai.com, port: 443 }
- { host: chatgpt.com, port: 443 }
binaries:
- { path: /usr/bin/codex }
- { path: /usr/bin/node }
- { path: "/usr/lib/node_modules/@openai/**" }
84 changes: 84 additions & 0 deletions sandboxes/test/Dockerfile
@@ -0,0 +1,84 @@
# syntax=docker/dockerfile:1.4

# SPDX-License-Identifier: Apache-2.0

# Ollama sandbox image for OpenShell
#
# Builds on the community base sandbox (has Node.js, Claude, Codex pre-installed).
# Build: docker build -t openshell-ollama --build-arg BASE_IMAGE=openshell-base .
# Run: openshell sandbox create --from ollama --forward 11434

ARG BASE_IMAGE=ghcr.io/nvidia/openshell-community/sandboxes/base:latest
FROM ${BASE_IMAGE}

USER root

# Install Miniconda as root during build
RUN apt-get update && apt-get install -y --no-install-recommends wget && \
    wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O /tmp/miniconda.sh && \
    bash /tmp/miniconda.sh -b -p /opt/conda && \
    rm /tmp/miniconda.sh && \
    /opt/conda/bin/conda clean -afy && \
    rm -rf /var/lib/apt/lists/*

# Accept Anaconda ToS and install default conda packages
RUN /opt/conda/bin/conda tos accept --override-channels --channel https://repo.anaconda.com/pkgs/main \
    && /opt/conda/bin/conda tos accept --override-channels --channel https://repo.anaconda.com/pkgs/r \
    && /opt/conda/bin/conda install -y -c conda-forge -c nvidia \
        vim htop nvtop ncurses numpy matplotlib cuda-toolkit \
    && /opt/conda/bin/conda clean -afy

# Make conda accessible to the sandbox user
RUN chown -R sandbox:sandbox /opt/conda

# Add conda to PATH for the sandbox user
ENV PATH="/opt/conda/bin:$PATH"

# Install zstd (required by Ollama install script)
RUN apt-get update && apt-get install -y --no-install-recommends zstd \
    && rm -rf /var/lib/apt/lists/*

# Install Ollama into /sandbox/bin so it lives on a writable path and can be
# updated at runtime (the sandbox policy makes /usr read-only).
RUN mkdir -p /sandbox/bin && \
    curl -fsSL https://ollama.com/install.sh | sh && \
    mv /usr/local/bin/ollama /sandbox/bin/ollama && \
    chown -R sandbox:sandbox /sandbox/bin

# Copy sandbox policy
COPY policy.yaml /etc/openshell/policy.yaml

# Copy entrypoint and update scripts
COPY entrypoint.sh /usr/local/bin/entrypoint
COPY update-ollama.sh /sandbox/bin/update-ollama
RUN chmod +x /usr/local/bin/entrypoint /sandbox/bin/update-ollama

# Set environment variables for OpenShell provider discovery
# /sandbox/bin comes first so the writable ollama binary is preferred
ENV OLLAMA_HOST=http://127.0.0.1:11434 \
    NPM_CONFIG_PREFIX=/sandbox/.npm-global \
    ANTHROPIC_MODEL=claude-opus-4-7 \
    CLAUDE_CODE_EFFORT_LEVEL=max \
    PATH="/sandbox/bin:/sandbox/.npm-global/bin:/sandbox/.venv/bin:/usr/local/bin:/usr/bin:/bin"

# Configure npm to install globals into a writable directory
# (the sandbox policy makes /usr read-only, so the default /usr/lib/node_modules fails)
RUN mkdir -p /sandbox/.npm-global && \
    chown sandbox:sandbox /sandbox/.npm-global

# Add environment variables to .bashrc for interactive shells
RUN echo 'export OLLAMA_HOST=http://127.0.0.1:11434' >> /sandbox/.bashrc && \
    echo 'export NPM_CONFIG_PREFIX=/sandbox/.npm-global' >> /sandbox/.bashrc && \
    echo 'export PATH="/sandbox/bin:/sandbox/.npm-global/bin:$PATH"' >> /sandbox/.bashrc && \
    echo 'export PATH="/opt/conda/bin:$PATH"' >> /sandbox/.bashrc && \
    echo 'export GIT_SSL_CAINFO=/etc/openshell-tls/ca-bundle.pem' >> /sandbox/.bashrc && \
    echo 'export ANTHROPIC_MODEL=claude-opus-4-7' >> /sandbox/.bashrc && \
    echo 'export CLAUDE_CODE_EFFORT_LEVEL=max' >> /sandbox/.bashrc && \
    chown sandbox:sandbox /sandbox/.bashrc

ENV GIT_SSL_CAINFO=/etc/openshell-tls/ca-bundle.pem

USER sandbox

ENTRYPOINT ["/usr/local/bin/entrypoint"]
CMD ["/bin/bash", "-l"]
40 changes: 40 additions & 0 deletions sandboxes/test/README.md
@@ -0,0 +1,40 @@
# Ollama Sandbox

OpenShell sandbox image pre-configured with [Ollama](https://ollama.com) for running local LLMs.

## What's Included

- **Ollama** — Runs local and cloud models and connects them to tools such as Claude Code, Codex, and OpenCode
- **Auto-start** — The Ollama server starts automatically when the sandbox boots
- **Pre-configured** — `OLLAMA_HOST` is set for OpenShell provider discovery
- **Claude Code** — Pre-installed (`claude` command)
- **Codex** — Pre-installed (`@openai/codex` npm package)
- **Node.js 22** — Runtime for npm-based tools
- **npm global** — Configured to install to user directory (works with read-only `/usr`)
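
A quick way to verify this wiring from inside the sandbox (a minimal sketch; the expected values follow from the image's environment setup):

```bash
echo "$OLLAMA_HOST"    # expect http://127.0.0.1:11434
npm config get prefix  # expect /sandbox/.npm-global
command -v ollama      # expect /sandbox/bin/ollama
node --version         # expect v22.x
```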

## Build

```bash
docker build -t openshell-ollama .
```
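
To build against a locally built base image instead of the published one, as the Dockerfile header documents:

```bash
docker build -t openshell-ollama --build-arg BASE_IMAGE=openshell-base .
```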

## Usage

### Create a sandbox

```bash
openshell sandbox create --from ollama
```
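
To reach the Ollama API from the host, forward the server port as the Dockerfile header suggests:

```bash
openshell sandbox create --from ollama --forward 11434
```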

### Update Ollama inside the sandbox

```bash
update-ollama
```

Or auto-update on startup:

```bash
openshell sandbox create --from ollama -e OLLAMA_UPDATE=1
```
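
The entrypoint also honors `OLLAMA_DEFAULT_MODEL` and pulls that model at startup if it is not already present (the model name here is illustrative):

```bash
openshell sandbox create --from ollama -e OLLAMA_DEFAULT_MODEL=llama3.2
```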

64 changes: 64 additions & 0 deletions sandboxes/test/entrypoint.sh
@@ -0,0 +1,64 @@
#!/usr/bin/env bash

# SPDX-FileCopyrightText: Copyright (c) 2025-2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: Apache-2.0

# Entrypoint for Ollama sandbox — auto-starts Ollama server
set -euo pipefail

# Export OLLAMA_HOST for OpenShell provider discovery
export OLLAMA_HOST="${OLLAMA_HOST:-http://127.0.0.1:11434}"

# Update Ollama if requested
if [ "${OLLAMA_UPDATE:-0}" = "1" ]; then
echo "[ollama] Updating to latest version..."
update-ollama
fi

# Start Ollama server in background
echo "[ollama] Starting Ollama server..."
nohup ollama serve > /tmp/ollama.log 2>&1 &
OLLAMA_PID=$!

# Wait for the server to come up (up to 60s); fail fast if it dies or never responds
echo "[ollama] Waiting for server to be ready..."
ready=0
for i in {1..60}; do
    if curl -fsSL http://127.0.0.1:11434/api/tags > /dev/null 2>&1; then
        echo "[ollama] Server ready at http://127.0.0.1:11434"
        ready=1
        break
    fi
    if ! kill -0 "$OLLAMA_PID" 2>/dev/null; then
        echo "[ollama] Server failed to start. Check /tmp/ollama.log"
        exit 1
    fi
    sleep 1
done
if [ "$ready" -ne 1 ]; then
    echo "[ollama] Server did not become ready within 60s. Check /tmp/ollama.log"
    exit 1
fi

# Pull default model if specified and not already present
if [ -n "${OLLAMA_DEFAULT_MODEL:-}" ]; then
if ! ollama list | grep -q "^${OLLAMA_DEFAULT_MODEL}"; then
echo "[ollama] Pulling model: ${OLLAMA_DEFAULT_MODEL}"
ollama pull "${OLLAMA_DEFAULT_MODEL}"
echo "[ollama] Model ${OLLAMA_DEFAULT_MODEL} ready"
fi
fi

# Print connection info
echo ""
echo "========================================"
echo "Ollama sandbox ready!"
echo " API: http://127.0.0.1:11434"
echo " Logs: /tmp/ollama.log"
echo " PID: ${OLLAMA_PID}"
if [ -n "${OLLAMA_DEFAULT_MODEL:-}" ]; then
echo " Model: ${OLLAMA_DEFAULT_MODEL}"
fi
echo "========================================"
echo ""

# Execute the provided command or start an interactive shell
if [ $# -eq 0 ]; then
    exec /bin/bash -l
else
    exec "$@"
fi