
[7] Implemented log display

Logs of container build and run steps can now be displayed
in the frontend.
at-robins 7 months ago
commit cbeac92b6d

+ 100 - 10
backend/src/application/config.rs

@@ -19,7 +19,7 @@ pub const PATH_FILES_EXPERIMENTS_LOGS: &str = "logs";
 /// The folder where global data is stored.
 pub const PATH_FILES_GLOBAL_DATA: &str = "globals";
 
-use std::{hash::Hash, hash::Hasher, path::PathBuf, time::SystemTime};
+use std::{fmt::Display, hash::Hash, hash::Hasher, path::PathBuf, time::SystemTime};
 
 use getset::Getters;
 use serde::{Deserialize, Serialize};
@@ -222,22 +222,57 @@ impl Configuration {
     /// * `pipeline_id` - the ID of the pipeline
     /// * `step_id` - the ID of the pipeline step
     /// * `process_type` - the type of process to log
-    pub fn experiment_log_path<P: AsRef<str>, Q: AsRef<str>, R: AsRef<str>, S: AsRef<str>>(
+    /// * `output_type` - the output type of the process to log
+    pub fn experiment_log_path<P: AsRef<str>, Q: AsRef<str>, R: AsRef<str>>(
         &self,
         experiment_id: P,
         pipeline_id: Q,
         step_id: R,
-        process_type: S,
+        process_type: LogProcessType,
+        output_type: LogOutputType,
     ) -> PathBuf {
         let mut path: PathBuf = self.experiment_logs_path(experiment_id);
         path.push(format!(
-            "{}_{}.log",
+            "{}_{}_{}.log",
             Self::hash_string(format!("{}{}", pipeline_id.as_ref(), step_id.as_ref())),
-            process_type.as_ref()
+            process_type,
+            output_type,
         ));
         path
     }
 
+    /// All potential context paths of pipeline log files.
+    ///
+    /// # Parameters
+    ///
+    /// * `experiment_id` - the ID of the experiment
+    /// * `pipeline_id` - the ID of the pipeline
+    /// * `step_id` - the ID of the pipeline step
+    pub fn experiment_log_paths_all<P: AsRef<str>, Q: AsRef<str>, R: AsRef<str>>(
+        &self,
+        experiment_id: P,
+        pipeline_id: Q,
+        step_id: R,
+    ) -> Vec<PathBuf> {
+        let mut paths = Vec::new();
+        for process_type in &[LogProcessType::Build, LogProcessType::Run] {
+            for output_type in &[
+                LogOutputType::StdOut,
+                LogOutputType::StdErr,
+                LogOutputType::ExitCode,
+            ] {
+                paths.push(self.experiment_log_path(
+                    experiment_id.as_ref(),
+                    pipeline_id.as_ref(),
+                    step_id.as_ref(),
+                    *process_type,
+                    *output_type,
+                ));
+            }
+        }
+        paths
+    }
+
     /// Generates a V1 UUID.
     pub fn generate_uuid() -> Uuid {
         let now = SystemTime::now()
@@ -265,6 +300,53 @@ impl Configuration {
     }
 }
 
+#[derive(Debug, Clone, Copy)]
+/// The process types of log files.
+pub enum LogProcessType {
+    /// The build process.
+    Build,
+    /// The run process.
+    Run,
+}
+
+impl Display for LogProcessType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "{}",
+            match self {
+                LogProcessType::Build => "build",
+                LogProcessType::Run => "run",
+            }
+        )
+    }
+}
+
+#[derive(Debug, Clone, Copy)]
+/// The output types of log files.
+pub enum LogOutputType {
+    /// Standard output stream.
+    StdOut,
+    /// Standard error stream.
+    StdErr,
+    /// The exit code.
+    ExitCode,
+}
+
+impl Display for LogOutputType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "{}",
+            match self {
+                LogOutputType::StdOut => "stdout",
+                LogOutputType::StdErr => "stderr",
+                LogOutputType::ExitCode => "exitcode",
+            }
+        )
+    }
+}
+
 #[cfg(test)]
 mod tests {
 
@@ -331,19 +413,27 @@ mod tests {
     fn test_experiment_logs_path() {
         let config = Configuration::new("", "", "", "", "./application/context", "");
         // Hash of step_id.
-        let path: PathBuf =
-            "./application/context/experiments/experiment_id/logs".into();
+        let path: PathBuf = "./application/context/experiments/experiment_id/logs".into();
         assert_eq!(config.experiment_logs_path("experiment_id"), path);
     }
 
-
     #[test]
     fn test_experiment_log_path() {
         let config = Configuration::new("", "", "", "", "./application/context", "");
         // Hash of step_id.
         let path: PathBuf =
-            "./application/context/experiments/experiment_id/logs/13269802908832430007_type.log".into();
-        assert_eq!(config.experiment_log_path("experiment_id", "pipeline_id", "step_id", "type"), path);
+            "./application/context/experiments/experiment_id/logs/13269802908832430007_build_stderr.log"
+                .into();
+        assert_eq!(
+            config.experiment_log_path(
+                "experiment_id",
+                "pipeline_id",
+                "step_id",
+                LogProcessType::Build,
+                LogOutputType::StdErr
+            ),
+            path
+        );
     }
 
     #[test]
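
Note on the new naming scheme: each pipeline step now owns six log files, one per `LogProcessType`/`LogOutputType` combination, which is exactly the set `experiment_log_paths_all` enumerates. A minimal standalone sketch of the resulting file names (it does not use the crate; the hash value is taken from the unit test above, and the format mirrors the format string in `experiment_log_path`):

    // Sketch only: reproduces the "{hash}_{process}_{output}.log" pattern
    // used by `experiment_log_path`; the hash comes from the existing test.
    fn main() {
        let hash = "13269802908832430007"; // Configuration::hash_string(pipeline_id + step_id)
        for process in ["build", "run"] {
            for output in ["stdout", "stderr", "exitcode"] {
                println!("{hash}_{process}_{output}.log");
            }
        }
    }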

+ 3 - 2
backend/src/controller.rs

@@ -1,5 +1,6 @@
-pub mod global_data_controller;
+pub mod experiment_controller;
 pub mod file_controller;
+pub mod global_data_controller;
+pub mod log_controller;
 pub mod pipeline_controller;
 pub mod routing;
-pub mod experiment_controller;

+ 11 - 20
backend/src/controller/experiment_controller.rs

@@ -95,6 +95,11 @@ pub async fn get_experiment_execution_status(
     let execution_steps = ExperimentExecution::get_by_experiment(id, &mut connection)?;
     let result = if execution_steps.is_empty() {
         "None".to_string()
+    } else if execution_steps
+        .iter()
+        .any(|execution| execution.execution_status == ExecutionStatus::Running.to_string())
+    {
+        ExecutionStatus::Running.to_string()
     } else if execution_steps
         .iter()
         .any(|execution| execution.execution_status == ExecutionStatus::Failed.to_string())
@@ -105,11 +110,6 @@ pub async fn get_experiment_execution_status(
         .any(|execution| execution.execution_status == ExecutionStatus::Aborted.to_string())
     {
         ExecutionStatus::Aborted.to_string()
-    } else if execution_steps
-        .iter()
-        .any(|execution| execution.execution_status == ExecutionStatus::Running.to_string())
-    {
-        ExecutionStatus::Running.to_string()
     } else if execution_steps
         .iter()
         .all(|execution| execution.execution_status == ExecutionStatus::Finished.to_string())
@@ -507,29 +507,20 @@ pub async fn post_execute_experiment_step(
                             "The requested run parameters are invalid.",
                         ));
                     }
-                    // Remove resources realted to the pipeline step.
+                    // Remove resources related to the pipeline step.
                     let step_path =
                         app_config.experiment_step_path(experiment_id.to_string(), &step_id);
                     if step_path.exists() {
                         std::fs::remove_dir_all(step_path)?;
                     }
-                    let log_path_build = app_config.experiment_log_path(
+                    for log_path in app_config.experiment_log_paths_all(
                         experiment_id.to_string(),
                         &pipeline_id,
                         &step_id,
-                        "build",
-                    );
-                    if log_path_build.exists() {
-                        std::fs::remove_file(log_path_build)?;
-                    }
-                    let log_path_run = app_config.experiment_log_path(
-                        experiment_id.to_string(),
-                        &pipeline_id,
-                        &step_id,
-                        "run",
-                    );
-                    if log_path_run.exists() {
-                        std::fs::remove_file(log_path_run)?;
+                    ) {
+                        if log_path.exists() {
+                            std::fs::remove_file(log_path)?;
+                        }
                     }
                     connection.immediate_transaction(|connection| {
                         let clear_time: Option<NaiveDateTime> = None;
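
Note on the status aggregation earlier in this file: `Running` now takes precedence over `Failed` and `Aborted`, so a failed step no longer masks a step that is still executing. A minimal sketch of that precedence, with string literals standing in for the crate's `ExecutionStatus`; branches beyond the ones visible in the hunk are omitted:

    // Sketch only: mirrors the precedence of the status chain above,
    // using string literals in place of `ExecutionStatus`.
    fn aggregate_status(step_statuses: &[&str]) -> Option<String> {
        if step_statuses.is_empty() {
            Some("None".to_string())
        } else if step_statuses.iter().any(|s| *s == "Running") {
            Some("Running".to_string())
        } else if step_statuses.iter().any(|s| *s == "Failed") {
            Some("Failed".to_string())
        } else if step_statuses.iter().any(|s| *s == "Aborted") {
            Some("Aborted".to_string())
        } else if step_statuses.iter().all(|s| *s == "Finished") {
            Some("Finished".to_string())
        } else {
            None // further branches exist in the real implementation but are not shown here
        }
    }

    fn main() {
        // A failed step no longer masks a step that is still running.
        assert_eq!(
            aggregate_status(&["Finished", "Failed", "Running"]).as_deref(),
            Some("Running")
        );
    }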

+ 84 - 0
backend/src/controller/log_controller.rs

@@ -0,0 +1,84 @@
+use crate::{
+    application::{
+        config::{Configuration, LogOutputType, LogProcessType},
+        database::DatabaseManager,
+        error::SeqError,
+    },
+    model::{
+        db::experiment::Experiment,
+        exchange::experiment_step_logs::{
+            ExperimentStepLog, ExperimentStepLogRequest, ExperimentStepLogs,
+        },
+    },
+};
+use actix_web::web;
+
+pub async fn get_experiment_step_logs(
+    database_manager: web::Data<DatabaseManager>,
+    app_config: web::Data<Configuration>,
+    experiment_id: web::Path<i32>,
+    info: web::Json<ExperimentStepLogRequest>,
+) -> Result<web::Json<ExperimentStepLogs>, SeqError> {
+    let experiment_id: i32 = experiment_id.into_inner();
+    let mut connection = database_manager.database_connection()?;
+    Experiment::exists_err(experiment_id, &mut connection)?;
+
+    let info: ExperimentStepLogRequest = info.into_inner();
+    let log_reader = LogFileReader {
+        config: app_config,
+        experiment_id,
+        pipeline_id: info.pipeline_id,
+        step_id: info.step_id,
+    };
+
+    let build_logs = ExperimentStepLog {
+        stdout: log_reader.get(LogProcessType::Build, LogOutputType::StdOut)?,
+        stderr: log_reader.get(LogProcessType::Build, LogOutputType::StdErr)?,
+        exit_code: log_reader.get(LogProcessType::Build, LogOutputType::ExitCode)?,
+    };
+    let run_logs = ExperimentStepLog {
+        stdout: log_reader.get(LogProcessType::Run, LogOutputType::StdOut)?,
+        stderr: log_reader.get(LogProcessType::Run, LogOutputType::StdErr)?,
+        exit_code: log_reader.get(LogProcessType::Run, LogOutputType::ExitCode)?,
+    };
+    let logs = ExperimentStepLogs {
+        build: build_logs,
+        run: run_logs,
+    };
+    Ok(web::Json(logs))
+}
+
+/// A reader for log files.
+struct LogFileReader {
+    pub config: web::Data<Configuration>,
+    pub experiment_id: i32,
+    pub pipeline_id: String,
+    pub step_id: String,
+}
+
+impl LogFileReader {
+    /// Reads the respective log file to a [`String`] if it exists.
+    ///
+    /// # Parameters
+    ///
+    /// * `process_type` - the process type of the log file
+    /// * `output_type` - the output type of the log file
+    pub fn get(
+        &self,
+        process_type: LogProcessType,
+        output_type: LogOutputType,
+    ) -> Result<Option<String>, SeqError> {
+        let path = self.config.experiment_log_path(
+            self.experiment_id.to_string(),
+            &self.pipeline_id,
+            &self.step_id,
+            process_type,
+            output_type,
+        );
+        Ok(if !path.exists() {
+            None
+        } else {
+            Some(std::fs::read_to_string(path)?)
+        })
+    }
+}

+ 9 - 3
backend/src/controller/routing.rs

@@ -6,15 +6,19 @@ use crate::application::error::SeqError;
 
 use super::{
     experiment_controller::{
-        create_experiment, delete_experiment, get_experiment, get_experiment_pipelines,
-        list_experiment, patch_experiment_comment, patch_experiment_mail, patch_experiment_name,
-        patch_experiment_pipeline, post_experiment_pipeline_variable, post_execute_experiment, get_experiment_execution_status, post_experiment_execution_abort, post_experiment_execution_reset, get_experiment_pipeline_run, post_execute_experiment_step,
+        create_experiment, delete_experiment, get_experiment, get_experiment_execution_status,
+        get_experiment_pipeline_run, get_experiment_pipelines, list_experiment,
+        patch_experiment_comment, patch_experiment_mail, patch_experiment_name,
+        patch_experiment_pipeline, post_execute_experiment, post_execute_experiment_step,
+        post_experiment_execution_abort, post_experiment_execution_reset,
+        post_experiment_pipeline_variable,
     },
     file_controller::{delete_files_by_path, get_files, post_add_file, post_add_folder},
     global_data_controller::{
         create_global_data, delete_global_data, get_global_data, list_global_data,
         patch_global_data_comment, patch_global_data_name,
     },
+    log_controller::get_experiment_step_logs,
     pipeline_controller::{
         get_pipeline_blueprint, get_pipeline_blueprints, get_pipeline_instance,
         patch_pipeline_blueprints,
@@ -51,6 +55,8 @@ pub fn routing_config(cfg: &mut ServiceConfig) {
     .route("/api/experiments/{id}", web::post().to(post_execute_experiment))
     .route("/api/experiments/{id}/abort", web::post().to(post_experiment_execution_abort))
     .route("/api/experiments/{id}/comment", web::patch().to(patch_experiment_comment))
+    // This route uses POST (rather than GET) so the request can carry a JSON message body.
+    .route("/api/experiments/{id}/logs", web::post().to(get_experiment_step_logs))
     .route("/api/experiments/{id}/mail", web::patch().to(patch_experiment_mail))
     .route("/api/experiments/{id}/name", web::patch().to(patch_experiment_name))
     .route("/api/experiments/{id}/pipeline", web::patch().to(patch_experiment_pipeline))

+ 1 - 0
backend/src/model/exchange.rs

@@ -1,5 +1,6 @@
 pub mod experiment_details;
 pub mod experiment_pipeline;
+pub mod experiment_step_logs;
 pub mod file_path;
 pub mod global_data_details;
 pub mod pipeline_step_details;

+ 34 - 0
backend/src/model/exchange/experiment_step_logs.rs

@@ -0,0 +1,34 @@
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+/// The full logs of an experiment step.
+pub struct ExperimentStepLogs {
+    /// The build logs.
+    pub build: ExperimentStepLog,
+    /// The run logs.
+    pub run: ExperimentStepLog,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+/// The logs of a specific process of an experiment step.
+pub struct ExperimentStepLog {
+    /// The stdout output.
+    pub stdout: Option<String>,
+    /// The stderr output.
+    pub stderr: Option<String>,
+    /// The process exit code.
+    pub exit_code: Option<String>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+/// Information required to request the log files
+/// for a specific pipeline step.
+pub struct ExperimentStepLogRequest {
+    /// The ID of the pipeline.
+    pub pipeline_id: String,
+    /// The ID of the pipeline step.
+    pub step_id: String,
+}
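
Because of `#[serde(rename_all = "camelCase")]`, the JSON exchanged with the frontend uses camelCase keys (`pipelineId`, `stepId`, `exitCode`). A minimal round-trip sketch with `serde_json` (assumed to be available alongside `serde`), redefining the request type locally so the snippet is self-contained:

    // Sketch only: shows the camelCase wire format produced by the
    // `rename_all` attribute; mirrors `ExperimentStepLogRequest` above.
    use serde::{Deserialize, Serialize};

    #[derive(Debug, Serialize, Deserialize)]
    #[serde(rename_all = "camelCase")]
    struct ExperimentStepLogRequest {
        pipeline_id: String,
        step_id: String,
    }

    fn main() -> Result<(), serde_json::Error> {
        let json = r#"{"pipelineId": "some_pipeline", "stepId": "some_step"}"#;
        let request: ExperimentStepLogRequest = serde_json::from_str(json)?;
        assert_eq!(request.step_id, "some_step");
        // Serializing back yields camelCase keys again.
        println!("{}", serde_json::to_string(&request)?);
        Ok(())
    }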

+ 92 - 21
backend/src/service/container_service.rs

@@ -3,7 +3,7 @@ use std::{
     ffi::OsString,
     io::{BufWriter, Write},
     path::Path,
-    process::{Child, Command, Output, Stdio},
+    process::{Child, Command, Output},
 };
 
 use actix_web::web;
@@ -12,7 +12,7 @@ use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl};
 
 use crate::{
     application::{
-        config::Configuration,
+        config::{Configuration, LogOutputType, LogProcessType},
         database::DatabaseManager,
         error::{SeqError, SeqErrorType},
     },
@@ -35,27 +35,65 @@ const CONTAINER_ENV_MOUNT: &str = "MOUNT_PATHS";
 /// # Parameters
 ///
 /// * `step` - the [`PipelineStepBlueprint`] to build the container for
+/// * `pipeline_id` - the ID of the containing [`PipelineBlueprint`]
 /// * `context` - the context directory containing the pipeline
+/// * `experiment_id` - the ID of the experiment
+/// * `app_config` - the app [`Configuration`]
 pub fn build_pipeline_step<P: AsRef<Path>, T: AsRef<str>>(
     step: &PipelineStepBlueprint,
     pipeline_id: T,
     context: P,
+    experiment_id: i32,
+    app_config: web::Data<Configuration>,
 ) -> Result<Child, SeqError> {
     let mut pipeline_step_path = context.as_ref().to_path_buf();
     pipeline_step_path.push("container");
     pipeline_step_path.push(step.container());
     let build_arg: OsString = "build".into();
     let name_spec: OsString = "-t".into();
-    let name_arg: OsString = format_container_name(pipeline_id, step.id()).into();
+    let name_arg: OsString = format_container_name(&pipeline_id, step.id()).into();
+    let progress_arg: OsString = "--progress=plain".into();
+
+    // Create log directory.
+    let logs_path = app_config.experiment_logs_path(experiment_id.to_string());
+    std::fs::create_dir_all(&logs_path)?;
+    // Open stdout log file.
+    let log_path_stdout = app_config.experiment_log_path(
+        experiment_id.to_string(),
+        &pipeline_id,
+        step.id(),
+        LogProcessType::Build,
+        LogOutputType::StdOut,
+    );
+    let log_file_stdout = std::fs::OpenOptions::new()
+        .create(true)
+        .write(true)
+        .append(false)
+        .truncate(true)
+        .open(log_path_stdout)?;
+    // Open stderr log file.
+    let log_path_stderr = app_config.experiment_log_path(
+        experiment_id.to_string(),
+        &pipeline_id,
+        step.id(),
+        LogProcessType::Build,
+        LogOutputType::StdErr,
+    );
+    let log_file_stderr = std::fs::OpenOptions::new()
+        .create(true)
+        .write(true)
+        .append(false)
+        .truncate(true)
+        .open(log_path_stderr)?;
 
     let child = Command::new("docker")
-        .stdin(Stdio::piped())
-        .stdout(Stdio::piped())
-        .stderr(Stdio::piped())
+        .stdout(log_file_stdout)
+        .stderr(log_file_stderr)
         .args([
             build_arg.as_os_str(),
             name_spec.as_os_str(),
             name_arg.as_os_str(),
+            progress_arg.as_os_str(),
             pipeline_step_path.as_os_str(),
         ])
         .spawn()?;
@@ -160,12 +198,43 @@ pub fn run_pipeline_step<T: AsRef<str>>(
         });
 
     // Set container to run.
-    arguments.push(format_container_name(pipeline_id, step.id()).into());
+    arguments.push(format_container_name(&pipeline_id, step.id()).into());
+
+    // Create log directory.
+    let logs_path = app_config.experiment_logs_path(experiment_id.to_string());
+    std::fs::create_dir_all(&logs_path)?;
+    // Open stdout log file.
+    let log_path_stdout = app_config.experiment_log_path(
+        experiment_id.to_string(),
+        &pipeline_id,
+        step.id(),
+        LogProcessType::Run,
+        LogOutputType::StdOut,
+    );
+    let log_file_stdout = std::fs::OpenOptions::new()
+        .create(true)
+        .write(true)
+        .append(false)
+        .truncate(true)
+        .open(log_path_stdout)?;
+    // Open stderr log file.
+    let log_path_stderr = app_config.experiment_log_path(
+        experiment_id.to_string(),
+        &pipeline_id,
+        step.id(),
+        LogProcessType::Run,
+        LogOutputType::StdErr,
+    );
+    let log_file_stderr = std::fs::OpenOptions::new()
+        .create(true)
+        .write(true)
+        .append(false)
+        .truncate(true)
+        .open(log_path_stderr)?;
 
     let output = Command::new("docker")
-        .stdin(Stdio::piped())
-        .stdout(Stdio::piped())
-        .stderr(Stdio::piped())
+        .stdout(log_file_stdout)
+        .stderr(log_file_stderr)
         .args(arguments)
         .spawn()?;
     Ok(output)
@@ -404,18 +473,18 @@ impl ContainerHandler {
     ///
     /// * `output` - the output to parse and log
-    /// * `build` - ```true``` if the build output is parsed, ```false``` if the run output is parsed
+    /// * `process_type` - the [`LogProcessType`] of the parsed output
-    fn parse_output(&self, output: Output, build: bool) -> Result<(), SeqError> {
+    fn parse_output(&self, output: Output, process_type: LogProcessType) -> Result<(), SeqError> {
         if let Some(step) = &self.executed_step {
             let logs_path = self
                 .config
                 .experiment_logs_path(step.experiment_id.to_string());
-            let process_type = if build { "build" } else { "run" };
             std::fs::create_dir_all(&logs_path)?;
             let log_path = self.config.experiment_log_path(
                 step.experiment_id.to_string(),
                 &step.pipeline_id,
                 &step.pipeline_step_id,
-                &process_type,
+                process_type,
+                LogOutputType::ExitCode,
             );
             let log_file = std::fs::OpenOptions::new()
                 .create(true)
@@ -424,12 +493,12 @@ impl ContainerHandler {
                 .truncate(true)
                 .open(log_path)?;
             let mut buffered_writer = BufWriter::new(log_file);
-            buffered_writer.write_all("[[ STDOUT ]]\n".as_bytes())?;
-            buffered_writer.write_all(&output.stdout)?;
-            buffered_writer.write_all("\n\n[[ STDERR ]]\n".as_bytes())?;
-            buffered_writer.write_all(&output.stderr)?;
-            buffered_writer.write_all("\n\n[[ EXIT STATUS ]]\n".as_bytes())?;
-            buffered_writer.write_all(output.status.to_string().as_bytes())?;
+            let exit_code = output
+                .status
+                .code()
+                .map(|code| code.to_string())
+                .unwrap_or("Terminated by signal".to_string());
+            buffered_writer.write_all(exit_code.as_bytes())?;
             if output.status.success() {
                 Ok(())
             } else {
@@ -476,7 +545,7 @@ impl ContainerHandler {
             ProcessStatus::Finished => {
                 if let Some(run) = self.run_process.take() {
                     // Handle output.
-                    self.parse_output(run.wait_with_output()?, false)?;
+                    self.parse_output(run.wait_with_output()?, LogProcessType::Run)?;
                     // Sets the status to finished.
                     let mut connection = self.database_manager.database_connection()?;
                     connection.immediate_transaction(|connection| {
@@ -508,7 +577,7 @@ impl ContainerHandler {
                 ProcessStatus::Finished => {
                     if let Some(build) = self.build_process.take() {
                         // Handle output.
-                        self.parse_output(build.wait_with_output()?, true)?;
+                        self.parse_output(build.wait_with_output()?, LogProcessType::Build)?;
                         // Start the subsequent run process.
                         self.start_run_process()?;
                         Ok(false)
@@ -547,6 +616,8 @@ impl ContainerHandler {
                     step_blueprint,
                     &step.pipeline_id,
                     pipeline.context(),
+                    step.experiment_id,
+                    web::Data::clone(&self.config),
                 )?);
                 Ok(())
             } else {
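
Instead of piping stdout and stderr and writing them out later, the docker build and run commands now stream directly into per-step log files. A minimal self-contained sketch of that redirection pattern with `std::process::Command`; the command and the paths are placeholders, not the crate's actual values:

    // Sketch only: redirect a child's stdout/stderr into freshly truncated
    // log files, as done for the docker build/run commands above.
    use std::process::Command;

    fn main() -> std::io::Result<()> {
        let stdout_log = std::fs::OpenOptions::new()
            .create(true)
            .write(true)
            .truncate(true)
            .open("stdout.log")?; // placeholder path
        let stderr_log = std::fs::OpenOptions::new()
            .create(true)
            .write(true)
            .truncate(true)
            .open("stderr.log")?; // placeholder path

        // `std::fs::File` converts into `Stdio`, so the handles can be passed directly.
        let mut child = Command::new("echo") // placeholder command
            .arg("hello")
            .stdout(stdout_log)
            .stderr(stderr_log)
            .spawn()?;
        child.wait()?;
        Ok(())
    }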

+ 23 - 2
frontend/src/components/experiment/ExperimentRunDetails.vue

@@ -82,8 +82,27 @@
           <div v-html="selectedStep.description" />
         </div>
       </q-card-section>
+      <q-card-section v-if="selectedStep && pipeline">
+        <q-expansion-item
+          expand-separator
+          :icon="symOutlinedTerminal"
+          label="Display pipeline step logs"
+          class="shadow-1 overflow-hidden"
+          header-class="bg-secondary text-white"
+          style="border-radius: 3px"
+        >
+          <q-card>
+            <q-card-section>
+              <experiment-step-logs
+                :experiment-id="id"
+                :pipeline-id="pipeline.id"
+                :step-id="selectedStep.id"
+              />
+            </q-card-section>
+          </q-card>
+        </q-expansion-item>
+      </q-card-section>
       <div v-if="selectedStep" class="q-gutter-md q-pa-md col">
-        <q-btn label="Display logs" class="row" />
         <q-btn label="Download output" class="row" />
 
         <q-btn
@@ -126,8 +145,10 @@ import {
   symOutlinedError,
   symOutlinedNotStarted,
   symOutlinedStopCircle,
+  symOutlinedTerminal,
 } from "@quasar/extras/material-symbols-outlined";
 import { matRestartAlt } from "@quasar/extras/material-icons";
+import ExperimentStepLogs from "./ExperimentStepLogs.vue";
 
// The interval in which pipeline updates are requested from the server.
 const POLLING_INTERVALL_MILLISECONDS = 10000;
@@ -325,7 +346,7 @@ function canBeStarted(step: PipelineStepBlueprint | null): boolean {
 
 /**
  * Tries to restart the specified step.
- * 
+ *
  * @param step the step to restart
  */
 function restartStep(step: PipelineStepBlueprint | null) {

+ 128 - 0
frontend/src/components/experiment/ExperimentStepLogs.vue

@@ -0,0 +1,128 @@
+<template>
+  <div class="no-wrap">
+    <q-tabs v-model="tab" narrow-indicator dense inline-label align="justify">
+      <q-tab
+        class="text-purple"
+        name="build"
+        :icon="symOutlinedBuildCircle"
+        label="Container build process"
+      />
+      <q-tab
+        class="text-orange"
+        name="run"
+        :icon="symOutlinedRunCircle"
+        label="Step run process"
+      />
+    </q-tabs>
+
+    <q-tab-panels v-model="tab" animated>
+      <q-tab-panel name="build">
+        <split-log-display v-if="logs" :log="logs.build" />
+      </q-tab-panel>
+
+      <q-tab-panel name="run">
+        <split-log-display v-if="logs" :log="logs.run" />
+      </q-tab-panel>
+    </q-tab-panels>
+  </div>
+</template>
+
+<script setup lang="ts">
+import { type ErrorResponse, type ExperimentStepLogs } from "@/scripts/types";
+import SplitLogDisplay from "@/components/shared/SplitLogDisplay.vue";
+import { onBeforeRouteLeave, useRouter } from "vue-router";
+import { ref, watch, type Ref, onBeforeUnmount } from "vue";
+import axios from "axios";
+import {
+  symOutlinedBuildCircle,
+  symOutlinedRunCircle,
+} from "@quasar/extras/material-symbols-outlined";
+
+// The interval in which log updates are requested from the server.
+const POLLING_INTERVALL_MILLISECONDS = 10000;
+
+const isPolling = ref(false);
+const pollingError: Ref<ErrorResponse | null> = ref(null);
+const showPollingError = ref(false);
+const router = useRouter();
+const this_route = router.currentRoute.value.fullPath;
+const pollingTimer: Ref<number | null> = ref(null);
+const logs: Ref<ExperimentStepLogs | null> = ref(null);
+const tab = ref("build");
+
+const props = defineProps({
+  experimentId: { type: String, required: true },
+  pipelineId: { type: String, required: true },
+  stepId: { type: String, required: true },
+});
+
+watch(
+  () => props.stepId,
+  () => {
+    stopPolling();
+    logs.value = null;
+    pollLogChanges();
+  },
+  { immediate: true }
+);
+
+// Clears the timer if the route is changed.
+onBeforeRouteLeave(() => {
+  stopPolling();
+});
+
+onBeforeUnmount(() => {
+  stopPolling();
+});
+
+function stopPolling() {
+  if (pollingTimer.value !== null) {
+    clearTimeout(pollingTimer.value);
+    pollingTimer.value = null;
+  }
+}
+
+/**
+ * Continuously polls changes from the server.
+ */
+function pollLogChanges() {
+  if (
+    !isPolling.value &&
+    !pollingError.value &&
+    // Stop polling if the route changes.
+    router.currentRoute.value.fullPath === this_route
+  ) {
+    isPolling.value = true;
+    pollingError.value = null;
+    const config = {
+      headers: {
+        "content-type": "application/json",
+      },
+    };
+    axios
+      .post(
+        "/api/experiments/" + props.experimentId + "/logs",
+        JSON.stringify({
+          pipelineId: props.pipelineId,
+          stepId: props.stepId,
+        }),
+        config
+      )
+      .then((response) => {
+        logs.value = response.data;
+        pollingTimer.value = window.setTimeout(
+          pollLogChanges,
+          POLLING_INTERVALL_MILLISECONDS
+        );
+      })
+      .catch((error) => {
+        showPollingError.value = true;
+        pollingError.value = error.response.data;
+      })
+      .finally(() => {
+        isPolling.value = false;
+      });
+  }
+}
+</script>
+<style scoped lang="scss"></style>

+ 30 - 0
frontend/src/components/shared/LogDisplay.vue

@@ -0,0 +1,30 @@
+<template>
+  <div>
+    <div class="text-h6 q-ma-md">{{ header }}</div>
+    <q-separator />
+    <div v-if="body === null || body === undefined" class="q-pa-md">
+      No logs are currently present.
+    </div>
+    <div
+      v-else
+      v-text="body"
+      class="q-pa-md"
+      style="
+        white-space: pre;
+        overflow: auto;
+        max-height: 80vh;
+        font-family: 'Courier New', Courier, monospace;
+      "
+    />
+  </div>
+</template>
+
+<script setup lang="ts">
+import type { PropType } from "vue";
+
+defineProps({
+  header: { type: String, required: true },
+  body: { type: String as PropType<string | null | undefined>, required: true },
+});
+</script>
+<style scoped lang="scss"></style>

+ 26 - 0
frontend/src/components/shared/SplitLogDisplay.vue

@@ -0,0 +1,26 @@
+<template>
+  <div style="width: 100%">
+    <q-splitter v-model="splitterModel" :limits="[0, 100]">
+      <template v-slot:before>
+        <log-display header="Standard output" :body="log.stdout" />
+      </template>
+
+      <template v-slot:after>
+        <log-display header="Standard error" :body="log.stderr" />
+      </template>
+    </q-splitter>
+  </div>
+</template>
+
+<script setup lang="ts">
+import { ref, type PropType, type Ref } from "vue";
+import LogDisplay from "./LogDisplay.vue";
+import type { ExperimentStepLog } from "@/scripts/types";
+
+defineProps({
+  log: { type: Object as PropType<ExperimentStepLog>, required: true },
+});
+
+const splitterModel: Ref<number> = ref(50);
+</script>
+<style scoped lang="scss"></style>

+ 11 - 0
frontend/src/scripts/types.ts

@@ -55,3 +55,14 @@ export enum ExperimentExecutionStatus {
   Waiting = "Waiting",
   None = "None",
 }
+
+export type ExperimentStepLog = {
+  stdout: string | null | undefined;
+  stderr: string | null | undefined;
+  exitCode: string | null | undefined;
+};
+
+export type ExperimentStepLogs = {
+  build: ExperimentStepLog;
+  run: ExperimentStepLog;
+};