diff --git a/macros/data-orchestrator/run-workflow.mdx b/macros/data-orchestrator/run-workflow.mdx
new file mode 100644
index 0000000000..88c8f1e0a9
--- /dev/null
+++ b/macros/data-orchestrator/run-workflow.mdx
@@ -0,0 +1,11 @@
+---
+macro: run-workflow
+---
+
+1. Click **Data Orchestrator** in the **Data & Analytics** section of the [console](https://console.scaleway.com/) side menu. The list of your workflows displays.
+2. Click the name of the workflow you want to run. The workflow **Overview** tab displays.
+3. Click the **Actions** button, then click **Run** in the drop-down menu. Your workflow starts, and a new entry appears in the **Runs** tab.
+
+
+ On the **Data Orchestrator** page, you can also click the run icon next to the workflow you want to run.
+
diff --git a/macros/data-orchestrator/upload-workflow.mdx b/macros/data-orchestrator/upload-workflow.mdx
new file mode 100644
index 0000000000..4e4a9d05e9
--- /dev/null
+++ b/macros/data-orchestrator/upload-workflow.mdx
@@ -0,0 +1,9 @@
+---
+macro: upload-workflow
+---
+
+The workflow upload window is displayed during the creation of a workflow, or when you click **Edit** on a workflow in Code view.
+
+1. Click **Upload**.
+2. Select the YAML file to use as the workflow definition.
+3. Click **Save changes** to finish, or click **Save and run** if you want to save and run the workflow.
diff --git a/menu/navigation.ts b/menu/navigation.ts
index 129f87bc79..d3b90da4de 100644
--- a/menu/navigation.ts
+++ b/menu/navigation.ts
@@ -11,6 +11,7 @@ import { cockpitMenu } from "../pages/cockpit/menu"
import { containerRegistryMenu } from "../pages/container-registry/menu"
import { cpanelHostingMenu } from "../pages/cpanel-hosting/menu"
import { dataLabMenu } from "../pages/data-lab/menu"
+import { dataOrchestratorMenu } from "../pages/data-orchestrator/menu"
import { dataWarehouseMenu } from "../pages/data-warehouse/menu"
import { dediboxMenu } from "../pages/dedibox/menu"
import { dediboxAccountMenu } from "../pages/dedibox-account/menu"
@@ -155,6 +156,7 @@ export default [
{
icon: 'DataAndAnalyticsCategoryIcon',
items: [
+ dataOrchestratorMenu,
dataWarehouseMenu,
dataLabMenu,
clustersForKafkaMenu,
diff --git a/pages/data-orchestrator/concepts.mdx b/pages/data-orchestrator/concepts.mdx
new file mode 100644
index 0000000000..e1452c3c76
--- /dev/null
+++ b/pages/data-orchestrator/concepts.mdx
@@ -0,0 +1,61 @@
+---
+title: Data Orchestrator - Concepts
+description: Learn the fundamental concepts of Scaleway Data Orchestrator.
+tags: data-orchestrator
+dates:
+ validation: 2026-04-01
+---
+
+## Orchestration
+
+Orchestration is the automated coordination of tasks and workflows that keeps data operations reliable, scalable, and maintainable. In the context of Scaleway Data Orchestrator, it enables users to define, schedule, and manage complex data pipelines. It also handles dependencies, error recovery, and execution order seamlessly. Instead of manually triggering scripts or monitoring jobs, orchestration brings structure and intelligence, turning fragmented processes into unified, business-aligned workflows.
+
+## Tasks
+
+### Action task
+
+An action task represents the executable unit within a workflow that performs concrete work. An action task can be:
+- **Serverless Jobs**: Long-running batch processes that scale automatically without infrastructure management.
+- **Serverless Functions**: Lightweight, event-driven code execution for quick transformations or API calls.
+- **Spark Jobs**: Distributed data processing tasks for large-scale ETL or analytics using Apache Spark.
+- Other compute-intensive or service-specific jobs (e.g., data validation, model inference).
+
+These tasks are orchestrated in sequence or in parallel, forming the backbone of data processing pipelines.
+
+### Logic task
+
+A Logic task controls the flow and decision-making within a workflow, enabling dynamic behavior beyond simple linear execution. A logic task can be:
+- **Switch**: Direct flow based on runtime conditions (e.g., file size, data quality).
+- **Fork**: Split execution into parallel branches to process data concurrently.
+- **Try catch**: Implement error-handling blocks to manage failures and enable retries or fallback logic.
+
+These tasks allow users to embed business logic directly into pipelines, making them resilient and adaptable.
+
+## Trigger
+
+A trigger is the event that initiates a workflow execution. A trigger can be:
+- **Manual**: User starts the run via the Scaleway Console or CLI (ideal for testing).
+- **Schedule**: Automatic execution based on time (e.g., daily at 8:00 AM), set with a built-in scheduler.
+- **Event**: Triggered by external signals (e.g., new file in object storage, message in a queue), enabling reactive, real-time data processing.
+
+## Views
+
+### Code view
+
+Every workflow can be visualized as code, showing tasks and their dependencies.
+
+### Graph view
+
+Every workflow can be visualized as a Directed Acyclic Graph (DAG), showing the tasks and their dependencies.
+
+## Workflow
+
+A workflow is a structured sequence of action tasks and logical tasks that define an end-to-end data process.
+
+### Workflow definition
+
+The declarative blueprint of a workflow, typically described in code (e.g., YAML or Python) or designed visually. It specifies tasks, dependencies, conditions, and execution parameters. This definition is version-controlled, reusable, and portable across environments.
+
+### Workflow execution / run
+
+The runtime instance of a workflow definition. Each execution (or run) tracks the state, logs, and results of every task, providing full observability and auditability. Runs can succeed, fail, or be paused, with detailed insights for debugging.
\ No newline at end of file
diff --git a/pages/data-orchestrator/how-to/create-workflow.mdx b/pages/data-orchestrator/how-to/create-workflow.mdx
new file mode 100644
index 0000000000..ecffc8f785
--- /dev/null
+++ b/pages/data-orchestrator/how-to/create-workflow.mdx
@@ -0,0 +1,88 @@
+---
+title: How to create a workflow using Data Orchestrator
+description: This page explains how to create a Data Orchestrator workflow on the Scaleway console.
+tags: data-orchestrator workflow create upload
+dates:
+ posted: 2026-04-01
+---
+import Requirements from '@macros/iam/requirements.mdx'
+import UploadWorkflow from '@macros/data-orchestrator/upload-workflow.mdx'
+import RunWorkflow from '@macros/data-orchestrator/run-workflow.mdx'
+
+
+
+ Data Orchestrator is currently in Private Beta. During this Beta period:
+ - **Code mode** is the only supported mode.
+ - The workflows only run Scaleway Serverless Jobs.
+
+
+
+
+- A Scaleway account logged into the [console](https://console.scaleway.com)
+- [Owner](/iam/concepts/#owner) status or [IAM permissions](/iam/concepts/#permission) allowing you to perform actions in the intended Organization
+- A [Container Registry namespace](/container-registry/how-to/create-namespace/)
+- An image [pushed](/container-registry/how-to/push-images/) to your namespace (for task execution)
+- Created at least three [Serverless Jobs](/serverless-jobs/how-to/create-job/)
+- Basic familiarity with YAML syntax
+- Basic familiarity with the [Serverless Workflow](https://serverlessworkflow.io/) specification language
+
+## Create a workflow definition YAML file
+
+Data Orchestrator requires the user to upload a YAML file to use as a workflow definition.
+
+
+ This is a basic example of a sequential workflow; more examples will be available soon.
+
+
+1. Create a YAML file and copy/paste the following content into it.
+ ```yaml
+ document:
+ dsl: 1.0.0
+ namespace:
+ name:
+ version: 1.0.0
+ do:
+ - task1:
+ call: "serverless_job"
+ metadata:
+ description: "The first job"
+ with:
+ id: ""
+ region: "fr-par"
+ - task2:
+ call: "serverless_job"
+ metadata:
+ description: "The second job"
+ with:
+ id: ""
+ region: "fr-par"
+ - task3:
+ call: "serverless_job"
+ metadata:
+ description: "The third job"
+ with:
+ id: ""
+ region: "fr-par"
+ ```
+2. Replace the ID placeholders with the IDs of your Serverless Jobs. You can copy them from the [Serverless Jobs page](https://console.scaleway.com/serverless-jobs/).
+
+ The fields `namespace` and `name` are required. They can be used as metadata to help organize your YAML files.
+
+3. Save the YAML file.
+
+## Create a Data Orchestrator workflow
+
+1. Click **Data Orchestrator** in the **Data & Analytics** section of the [console](https://console.scaleway.com/) side menu. The list of your workflows displays.
+2. Click **+ Create workflow**. The Data Orchestrator workflow creation wizard displays.
+3. Select the region in which your workflow will be created.
+4. Enter a name or use the automatically generated one.
+5. Enter a description (optional).
+6. Click **Create workflow only** to finish, or click **Create workflow and add tasks** if you want to upload a workflow next.
+
+## Upload the workflow definition
+
+
+
+## Run a workflow
+
+
\ No newline at end of file
diff --git a/pages/data-orchestrator/how-to/index.mdx b/pages/data-orchestrator/how-to/index.mdx
new file mode 100644
index 0000000000..0ff7a4dc7e
--- /dev/null
+++ b/pages/data-orchestrator/how-to/index.mdx
@@ -0,0 +1,4 @@
+---
+title: Data Orchestrator - How Tos
+description: Practical guides for using Scaleway Data Orchestrator.
+---
\ No newline at end of file
diff --git a/pages/data-orchestrator/how-to/run-workflow.mdx b/pages/data-orchestrator/how-to/run-workflow.mdx
new file mode 100644
index 0000000000..4c470d2a5f
--- /dev/null
+++ b/pages/data-orchestrator/how-to/run-workflow.mdx
@@ -0,0 +1,25 @@
+---
+title: How to run a workflow using Data Orchestrator
+description: This page explains how to run a Data Orchestrator workflow on the Scaleway console.
+tags: data-orchestrator workflow run
+dates:
+ posted: 2026-04-20
+---
+import Requirements from '@macros/iam/requirements.mdx'
+import RunWorkflow from '@macros/data-orchestrator/run-workflow.mdx'
+
+
+ Data Orchestrator is currently in Private Beta. During this Beta period:
+ - **Code mode** is the only supported mode.
+ - The workflows only run Scaleway Serverless Jobs.
+
+
+
+
+- A Scaleway account logged into the [console](https://console.scaleway.com)
+- [Owner](/iam/concepts/#owner) status or [IAM permissions](/iam/concepts/#permission) allowing you to perform actions in the intended Organization
+- Created a [Data Orchestrator workflow](/data-orchestrator/how-to/create-workflow/)
+
+## Run a workflow
+
+
diff --git a/pages/data-orchestrator/index.mdx b/pages/data-orchestrator/index.mdx
new file mode 100644
index 0000000000..a282abe6d7
--- /dev/null
+++ b/pages/data-orchestrator/index.mdx
@@ -0,0 +1,42 @@
+---
+title: Data Orchestrator Documentation
+description: Comprehensive documentation on Scaleway Data Orchestrator.
+---
+
+
+ Data Orchestrator is currently in Private Beta.
+
+
+
+
+## Getting Started
+
+
+
+
+
+
+## Changelog
+
+
diff --git a/pages/data-orchestrator/menu.ts b/pages/data-orchestrator/menu.ts
new file mode 100644
index 0000000000..d0322b3b45
--- /dev/null
+++ b/pages/data-orchestrator/menu.ts
@@ -0,0 +1,28 @@
+export const dataOrchestratorMenu = { // Sidebar menu for Data Orchestrator; imported by menu/navigation.ts (Data & Analytics category)
+  items: [
+    {
+      label: 'Overview',
+      slug: '../data-orchestrator', // NOTE(review): '../' presumably resolves to the section landing page (index.mdx) — confirm against the router's slug handling
+    },
+    {
+      label: 'Concepts',
+      slug: 'concepts', // pages/data-orchestrator/concepts.mdx
+    },
+    {
+      items: [ // child entries rendered under the "How to" group
+        {
+          label: 'Create a workflow',
+          slug: 'create-workflow', // pages/data-orchestrator/how-to/create-workflow.mdx
+        },
+        {
+          label: 'Run a workflow',
+          slug: 'run-workflow', // pages/data-orchestrator/how-to/run-workflow.mdx
+        },
+      ],
+      label: 'How to',
+      slug: 'how-to', // group slug; child slugs presumably resolve relative to it — verify
+    },
+  ],
+  label: 'Data Orchestrator',
+  slug: 'data-orchestrator', // top-level slug prefix for every entry above
+}