fixes docsite page linking
Signed-off-by: Ian Eaves <ian.k.eaves@gmail.com>
@@ -7,7 +7,7 @@ This guide walks through the steps required to set up RamaLama with Ascend NPU s
 - [Hardware](#hardware)
 - [Model](#model)
 - [Docker](#docker)
-- [HISTORY](#todo)
+- [HISTORY](#history)

 ## Background

@@ -3,7 +3,7 @@
 # This Makefile provides commands for building and managing the RamaLama
 # documentation site powered by Docusaurus.

-.PHONY: help convert dev build serve clean install
+.PHONY: help convert dev build serve clean install clean-generated

 # Default target - show help
 help: ## Show this help message
@@ -43,6 +43,14 @@ clean: ## Clean build artifacts and node_modules
 	@rm -rf build .docusaurus node_modules
 	@echo "✅ Clean complete!"

+clean-generated: ## Clean auto-generated MDX files
+	@echo "Cleaning auto-generated MDX files..."
+	@find docs -name "*.mdx" -exec grep -l "# This file is auto-generated from manpages. Do not edit manually." {} \; | while read file; do \
+		echo " Removing: $$file"; \
+		rm "$$file"; \
+	done
+	@echo "✅ Auto-generated files cleaned!"
+
 all: install convert build ## Install deps, convert manpages, and build site

 # Development workflow targets

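For context, the new `clean-generated` target amounts to roughly the following; this is a standalone Python sketch (not part of the commit) of the same find-and-grep loop, keyed on the marker string used in the Makefile recipe above:

```python
from pathlib import Path

# Marker that the conversion script writes into every generated MDX file.
MARKER = "# This file is auto-generated from manpages. Do not edit manually."

def clean_generated(docs_dir: str = "docs") -> None:
    """Delete auto-generated .mdx files, mirroring `make clean-generated`."""
    for mdx in Path(docs_dir).rglob("*.mdx"):
        if MARKER in mdx.read_text(encoding="utf-8"):
            print(f"  Removing: {mdx}")
            mdx.unlink()

if __name__ == "__main__":
    clean_generated()
```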
@@ -43,8 +43,14 @@ def extract_title_and_description(content, filename):

         title = f"{platform} Setup"
     if base_name.endswith('.5.md'):
-        # Config: ramalama.conf.5.md -> ramalama.conf
-        title = base_name.replace('.5.md', '')
+        # Config files with custom titles
+        if base_name == 'ramalama.conf.5.md':
+            title = 'Configuration File'
+        elif base_name == 'ramalama-oci.5.md':
+            title = 'OCI Spec'
+        else:
+            # Fallback for other .5.md files
+            title = base_name.replace('.5.md', '')
     else:
         # Fallback
         title = base_name.replace('.md', '').replace('-', ' ')
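To see what the new branching yields, here is a quick standalone rendering of the title logic (illustrative only; the real code sits inside extract_title_and_description and also handles the platform-guide case shown above):

```python
def title_for(base_name: str) -> str:
    # Standalone rendering of the new .5.md branching; illustrative only.
    if base_name.endswith('.5.md'):
        if base_name == 'ramalama.conf.5.md':
            return 'Configuration File'
        if base_name == 'ramalama-oci.5.md':
            return 'OCI Spec'
        return base_name.replace('.5.md', '')   # fallback for other .5.md files
    return base_name.replace('.md', '').replace('-', ' ')

for name in ('ramalama.conf.5.md', 'ramalama-oci.5.md', 'ramalama-shortnames.5.md'):
    print(name, '->', title_for(name))
# ramalama.conf.5.md -> Configuration File
# ramalama-oci.5.md -> OCI Spec
# ramalama-shortnames.5.md -> ramalama-shortnames
```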
@@ -136,6 +142,8 @@ def convert_markdown_to_mdx(content, filename):
     if history_match:
         history_text = history_match.group(1).strip()
         content = re.sub(history_pattern, '', content, flags=re.DOTALL)
+        # Remove TOC links to HISTORY since it becomes a footer
+        content = re.sub(r'\s*- \[HISTORY\]\(#history\)\n?', '', content)
         # Add history as footer
        content += f"\n\n---\n\n*{history_text}*"

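To make the effect of the two added lines concrete, a minimal self-contained sketch of the HISTORY handling is shown below; `history_pattern` here is a stand-in, since the script's actual pattern is defined outside this hunk:

```python
import re

# Illustrative input: a converted manpage with a TOC entry and a trailing HISTORY section.
content = (
    "- [Examples](#examples)\n"
    "- [HISTORY](#history)\n\n"
    "## EXAMPLES\n...\n\n"
    "## HISTORY\nAug 2024, Originally compiled by the RamaLama maintainers\n"
)

history_pattern = r'## HISTORY\n(.*?)\Z'   # assumed stand-in for the script's real pattern
history_match = re.search(history_pattern, content, flags=re.DOTALL)
if history_match:
    history_text = history_match.group(1).strip()
    content = re.sub(history_pattern, '', content, flags=re.DOTALL)
    # New in this commit: drop the now-dangling TOC entry before appending the footer.
    content = re.sub(r'\s*- \[HISTORY\]\(#history\)\n?', '', content)
    content += f"\n\n---\n\n*{history_text}*"

print(content)
```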
@@ -171,7 +179,7 @@ def convert_markdown_to_mdx(content, filename):
             if filename == 'ramalama.1.md':
                 return f'[{text}](#)'  # Self-reference
             else:
-                return f'[{text}]({base_path}commands/ramalama/ramalama)'
+                return f'[{text}](/docs/commands/ramalama/)'  # Link to ramalama category index
         return f'[{text}]({base_path}commands/ramalama/{command_name})'
     if link.endswith('.5.md'):
         # Configuration file
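The See Also link changes in the generated command pages below all follow from this branch. A rough standalone sketch of the rewrite (names such as `rewrite_man_links` are illustrative; the actual logic lives in the replacement callback inside `convert_markdown_to_mdx`):

```python
import re

def rewrite_man_links(markdown: str, base_path: str = '../../') -> str:
    """Illustrative sketch: map manpage cross-references like
    [ramalama-serve(1)](ramalama-serve.1.md) to docsite paths, sending the
    top-level ramalama(1) link to the category index as this commit does."""
    def repl(match: re.Match) -> str:
        text, target = match.group(1), match.group(2)
        name = target[:-len('.1.md')]                   # 'ramalama-serve.1.md' -> 'ramalama-serve'
        if name == 'ramalama':
            return f'[{text}](/docs/commands/ramalama/)'    # category index
        command = name.replace('ramalama-', '', 1)          # 'ramalama-serve' -> 'serve'
        return f'[{text}]({base_path}commands/ramalama/{command})'

    return re.sub(r'\[([^\]]+)\]\(([\w.-]+\.1\.md)\)', repl, markdown)

print(rewrite_man_links('[ramalama(1)](ramalama.1.md), [ramalama-serve(1)](ramalama-serve.1.md)'))
# [ramalama(1)](/docs/commands/ramalama/), [ramalama-serve(1)](../../commands/ramalama/serve)
```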
@@ -156,7 +156,7 @@ ramalama bench granite3-moe
 ```

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama)
+[ramalama(1)](/docs/commands/ramalama/)

 ---

@@ -59,7 +59,7 @@ $ ramalama chat --url http://localhost:1234
 ```

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama)
+[ramalama(1)](/docs/commands/ramalama/)

 ---

@@ -74,7 +74,7 @@ granite-server
 ```

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama)
+[ramalama(1)](/docs/commands/ramalama/)

 ---

@@ -65,7 +65,7 @@ $ ramalama run oci://quay.io/kugupta/granite-3.2-q4-k-m:latest
 ```

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama), [ramalama-push(1)](../../commands/ramalama/push)
+[ramalama(1)](/docs/commands/ramalama/), [ramalama-push(1)](../../commands/ramalama/push)

 ---

@@ -352,7 +352,7 @@ $ ramalama info | jq .Shortnames.Names.mixtao
 ```

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama)
+[ramalama(1)](/docs/commands/ramalama/)

 ---

@@ -75,7 +75,7 @@ $ ramalama inspect smollm:135m --all --json
 ```

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama)
+[ramalama(1)](/docs/commands/ramalama/)

 ---

@@ -49,7 +49,7 @@ $ ramalama list --json
 ```

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama)
+[ramalama(1)](/docs/commands/ramalama/)

 ---

@@ -69,7 +69,7 @@ $ ramalama login --token=XYZ
 Logging in to ModelScope requires the `modelscope` tool. For installation and usage instructions, see the documentation of the ModelScope command line interface: [*https://www.modelscope.cn/docs/Beginner-s-Guide/Environment-Setup*](https://www.modelscope.cn/docs/Beginner-s-Guide/Environment-Setup).

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama)
+[ramalama(1)](/docs/commands/ramalama/)

 ---

@@ -41,7 +41,7 @@ Logout from huggingface
 $ ramalama logout huggingface
 ```
 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama)
+[ramalama(1)](/docs/commands/ramalama/)

 ---

@@ -164,7 +164,7 @@ ramalama perplexity granite3-moe
 ```

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama)
+[ramalama(1)](/docs/commands/ramalama/)

 ---

@@ -25,7 +25,7 @@ Print usage message
 require HTTPS and verify certificates when contacting OCI registries

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama)
+[ramalama(1)](/docs/commands/ramalama/)

 ---

@@ -74,7 +74,7 @@ Writing manifest to image destination
 ```

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama), [ramalama-convert(1)](../../commands/ramalama/convert)
+[ramalama(1)](/docs/commands/ramalama/), [ramalama-convert(1)](../../commands/ramalama/convert)

 ---

@@ -120,7 +120,7 @@ $ ls /tmp/output/docs/tmp/
 ```

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama)
+[ramalama(1)](/docs/commands/ramalama/)

 ---

@@ -35,7 +35,7 @@ $ ramalama rm --ignore bogusmodel
 ```

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama)
+[ramalama(1)](/docs/commands/ramalama/)

 ---

@@ -215,7 +215,7 @@ This program is a Python script that allows the user to interact with a terminal
 See [ramalama-cuda(7)](../../platform-guides/cuda) for setting up the host Linux system for CUDA support.

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama), [ramalama-cuda(7)](../../platform-guides/cuda)
+[ramalama(1)](/docs/commands/ramalama/), [ramalama-cuda(7)](../../platform-guides/cuda)

 ---

@@ -522,7 +522,7 @@ ramalama --runtime=mlx serve hf://mlx-community/Unsloth-Phi-4-4bit
 ```

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama), [ramalama-stop(1)](../../commands/ramalama/stop), **quadlet(1)**, **systemctl(1)**, **podman(1)**, **podman-ps(1)**, [ramalama-cuda(7)](../../platform-guides/cuda)
+[ramalama(1)](/docs/commands/ramalama/), [ramalama-stop(1)](../../commands/ramalama/stop), **quadlet(1)**, **systemctl(1)**, **podman(1)**, **podman-ps(1)**, [ramalama-cuda(7)](../../platform-guides/cuda)

 ---

@@ -38,7 +38,7 @@ $ ramalama stop --all
 ```

 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama), [ramalama-run(1)](../../commands/ramalama/run), [ramalama-serve(1)](../../commands/ramalama/serve)
+[ramalama(1)](/docs/commands/ramalama/), [ramalama-run(1)](../../commands/ramalama/run), [ramalama-serve(1)](../../commands/ramalama/serve)

 ---

@@ -28,7 +28,7 @@ $ ramalama -q version
 >
 ```
 ## See Also
-[ramalama(1)](../../commands/ramalama/ramalama)
+[ramalama(1)](/docs/commands/ramalama/)

 ---

@@ -1,11 +1,11 @@
 ---
-title: ramalama.conf
+title: Configuration File
 description: Configuration file reference
 # This file is auto-generated from manpages. Do not edit manually.
 # Source: ramalama.conf.5.md
 ---

-# ramalama.conf
+# Configuration File

 # NAME
 ramalama.conf - These configuration files specifies default
@@ -1,11 +1,11 @@
 ---
-title: ramalama-oci
+title: OCI Spec
 description: Configuration file reference
 # This file is auto-generated from manpages. Do not edit manually.
 # Source: ramalama-oci.5.md
 ---

-# ramalama-oci
+# OCI Spec

 # NAME
 ramalama-oci - RamaLama oci:// Image Format
@@ -77,7 +77,7 @@ Once RamaLama is installed, you can:
 2. **Run a model**: `ramalama run ollama://tinyllama`
 3. **Explore available commands**: `ramalama --help`

-For detailed usage instructions, see the [Commands](../commands/ramalama/ramalama) section.
+For detailed usage instructions, see the [Commands](../commands/ramalama/) section.

 ## Platform-Specific Setup

@@ -14,8 +14,6 @@ This guide walks through the steps required to set up RamaLama with Ascend NPU s
 - [Hardware](#hardware)
 - [Model](#model)
 - [Docker](#docker)
-- [HISTORY](#todo)
-
 ## Background

 **Ascend NPU** is a range of AI processors using Neural Processing Unit. It will efficiently handle matrix-matrix multiplication, dot-product and scalars.
@@ -186,7 +186,7 @@ sudo nvidia-ctk cdi generate --output=/etc/cdi/nvidia.yaml

 ## See Also

-[ramalama(1)](../commands/ramalama/ramalama), [podman(1)](https://github.com/containers/podman/blob/main/docs/source/markdown/podman.1.md)
+[ramalama(1)](/docs/commands/ramalama/), [podman(1)](https://github.com/containers/podman/blob/main/docs/source/markdown/podman.1.md)

 ---

@@ -60,7 +60,7 @@ Refer to the [Podman Desktop documentation](https://podman-desktop.io/docs/podma

 ## See Also

-[ramalama(1)](../commands/ramalama/ramalama), [podman-machine(1)](https://github.com/containers/podman/blob/main/docs/source/markdown/podman-machine.1.md)
+[ramalama(1)](/docs/commands/ramalama/), [podman-machine(1)](https://github.com/containers/podman/blob/main/docs/source/markdown/podman-machine.1.md)

 ---

@@ -97,7 +97,7 @@ const config: Config = {
       items: [
         {
           label: 'Getting Started',
-          to: '/docs/getting-started',
+          to: '/docs/getting-started/installation',
         },
         {
           label: 'API References',