From 5a1e3bcd72940b469ed2b889475c152f52ef9019 Mon Sep 17 00:00:00 2001
From: axelwalter
Date: Wed, 3 Jul 2024 11:51:42 +0200
Subject: [PATCH 01/24] update dependencies

---
 environment.yml  | 13 +++++--------
 requirements.txt |  9 +++------
 2 files changed, 8 insertions(+), 14 deletions(-)

diff --git a/environment.yml b/environment.yml
index ef14bf44b..5ec4ef070 100644
--- a/environment.yml
+++ b/environment.yml
@@ -3,15 +3,12 @@ name: streamlit-env
 channels:
   - conda-forge
 dependencies:
-  - python==3.10
-  - plotly==5.18.0
-  - pip==23.3
-  - numpy==1.25.2
-  - pandas==2.1.2
+  - python==3.11
+  - plotly==5.22.0
+  - pip==24.0
+  - numpy==1.26.4 # pandas and numpy are dependencies of pyopenms, however, pyopenms needs numpy<=1.26.4
   - mono==6.12.0.90
   - pip: # dependencies only available through pip
     # streamlit dependencies
-    - streamlit==1.29.0
-    - streamlit-plotly-events==0.0.6
-    - streamlit-aggrid==0.3.4.post3
+    - streamlit==1.36.0
     - captcha==0.5.0
diff --git a/requirements.txt b/requirements.txt
index ac7acd635..11aa933ff 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,11 +1,8 @@
 # the requirements.txt file is intended for deployment on streamlit cloud and if the simple container is built
 # note that it is much more restricted in terms of installing third-parties / etc.
 # preferably use the batteries included or simple docker file for local hosting
-streamlit==1.34.0
-streamlit-plotly-events==0.0.6
-streamlit-aggrid==0.3.4.post3
-pandas==2.1.2
-numpy==1.25.2
-plotly==5.18.0
+streamlit==1.36.0
 pyopenms==3.1.0
+numpy==1.26.4 # pandas and numpy are dependencies of pyopenms, however, pyopenms needs numpy<=1.26.4
+plotly==5.22.0
 captcha==0.5.0
\ No newline at end of file

From cc4e1a0f3185544390d1ed5c54b324066a4d7a23 Mon Sep 17 00:00:00 2001
From: axelwalter
Date: Tue, 9 Jul 2024 11:26:36 +0200
Subject: [PATCH 02/24] update raw data viewer

- remove plotly-events and aggrid
- 2D peak map updating while zooming in
- 3D peak map static plot on zoom selection of 2D plot
- spectrum viewer, select spectrum from table, update top 5 m/z annotations zooming in
- chromatogram viewer with BPC, TIC and XIC
- every section in separate fragments
---
 hooks/hook-streamlit.py                      |   1 -
 "pages/11_\360\237\221\200_View_Raw_Data.py" |  87 +----
 run_app_temp.spec                            |   1 -
 src/common.py                                |  70 ++--
 src/plotting/.gitignore                      |   1 +
 src/plotting/BasePlotter.py                  |  58 ++++
 src/plotting/MSExperimentPlotter.py          | 214 ++++++++++++
 src/view.py                                  | 329 ++++++++++++-------
 win_exe_with_pyinstaller.md                  |   2 -
 9 files changed, 551 insertions(+), 212 deletions(-)
 create mode 100644 src/plotting/.gitignore
 create mode 100644 src/plotting/BasePlotter.py
 create mode 100644 src/plotting/MSExperimentPlotter.py

diff --git a/hooks/hook-streamlit.py b/hooks/hook-streamlit.py
index 7cb471263..11bf2b6c5 100644
--- a/hooks/hook-streamlit.py
+++ b/hooks/hook-streamlit.py
@@ -2,7 +2,6 @@
 datas = []
 datas += copy_metadata("streamlit")
-datas += copy_metadata("streamlit_plotly_events")
 datas += copy_metadata("pyopenms")
 datas += copy_metadata("captcha")
 datas += copy_metadata("pyarrow")
diff --git "a/pages/11_\360\237\221\200_View_Raw_Data.py" "b/pages/11_\360\237\221\200_View_Raw_Data.py"
index 9e6809142..f2ae9c2fa 100755
--- "a/pages/11_\360\237\221\200_View_Raw_Data.py"
+++ "b/pages/11_\360\237\221\200_View_Raw_Data.py"
@@ -1,9 +1,8 @@
 from pathlib import Path
-from streamlit_plotly_events import plotly_events
 import streamlit as st
-from src.common import page_setup, v_space, show_fig, save_params
+from src.common import page_setup
 from src import view
 and from
src.captcha_ import captcha_control @@ -16,78 +15,24 @@ captcha_control() st.title("View raw MS data") -selected_file = st.selectbox( + +# File selection can not be in fragment since it influences the subsequent sections +cols = st.columns(3) +selected_file = cols[0].selectbox( "choose file", [f.name for f in Path(st.session_state.workspace, "mzML-files").iterdir()], + key="view_selected_file" ) if selected_file: - df = view.get_df(Path(st.session_state.workspace, "mzML-files", selected_file)) - df_MS1, df_MS2 = ( - df[df["mslevel"] == 1], - df[df["mslevel"] == 2], - ) - - if not df_MS1.empty: - tabs = st.tabs( - ["πŸ“ˆ Base peak chromatogram and MS1 spectra", "πŸ“ˆ Peak map and MS2 spectra"] - ) - with tabs[0]: - # BPC and MS1 spec - st.markdown("πŸ’‘ Click a point in the BPC to show the MS1 spectrum.") - bpc_fig = view.plot_bpc(df_MS1) - - # Determine RT positions from clicks in BPC to show MS1 at this position - bpc_points = plotly_events(bpc_fig) - if bpc_points: - ms1_rt = bpc_points[0]["x"] - else: - ms1_rt = df_MS1.loc[0, "RT"] - - spec = df_MS1.loc[df_MS1["RT"] == ms1_rt].squeeze() + view.get_df(Path(st.session_state.workspace, "mzML-files", selected_file)) - title = f"MS1 spectrum @RT {spec['RT']}" - fig = view.plot_ms_spectrum( - spec, - title, - "#EF553B", - ) - show_fig(fig, title.replace(" ", "_")) - with tabs[1]: - c1, c2 = st.columns(2) - c1.number_input( - "2D map intensity cutoff", - 1000, - 1000000000, - params["2D-map-intensity-cutoff"], - 1000, - key="2D-map-intensity-cutoff", - ) - v_space(1, c2) - c2.markdown("πŸ’‘ Click anywhere to show the closest MS2 spectrum.") - map2D = view.plot_2D_map( - df_MS1, - df_MS2, - st.session_state["2D-map-intensity-cutoff"], - ) - map_points = plotly_events(map2D) - # Determine RT and mz positions from clicks in the map to get closest MS2 spectrum - if not df_MS2.empty: - if map_points: - rt = map_points[0]["x"] - prec_mz = map_points[0]["y"] - else: - rt = df_MS2.iloc[0, 2] - prec_mz = df_MS2.iloc[0, 0] - spec = df_MS2.loc[ - ( - abs(df_MS2["RT"] - rt) + abs(df_MS2["precursormz"] - prec_mz) - ).idxmin(), - :, - ] - title = f"MS2 spectrum @precursor m/z {round(spec['precursormz'], 4)} @RT {round(spec['RT'], 2)}" - - ms2_fig = view.plot_ms_spectrum(spec, title, "#00CC96") - show_fig(ms2_fig, title.replace(" ", "_")) - -save_params(params) +tabs = st.tabs( + ["πŸ“ˆ Peak map (MS1)", "πŸ“ˆ Spectra (MS1 + MS2)", "πŸ“ˆ Chromatograms (MS1)"] +) +with tabs[0]: + view.view_peak_map() +with tabs[1]: + view.view_spectrum() +with tabs[2]: + view.view_bpc_tic() diff --git a/run_app_temp.spec b/run_app_temp.spec index 4ccd4bb17..2afc7a48f 100644 --- a/run_app_temp.spec +++ b/run_app_temp.spec @@ -12,7 +12,6 @@ a = Analysis( ("./myenv/Lib/site-packages/altair/vegalite/v5/schema/vega-lite-schema.json","./altair/vegalite/v5/schema/"), ("./myenv/Lib/site-packages/streamlit/static", "./streamlit/static"), ("./myenv/Lib/site-packages/streamlit/runtime", "./streamlit/runtime"), - ("./myenv/Lib/site-packages/streamlit_plotly_events", "./streamlit_plotly_events/"), ("./myenv/Lib/site-packages/pyopenms", "./pyopenms/"), ("./myenv/Lib/site-packages/captcha", "./captcha/"), ("./myenv/Lib/site-packages/pyarrow", "./pyarrow/"), diff --git a/src/common.py b/src/common.py index 6be9bfd64..87c294dbb 100644 --- a/src/common.py +++ b/src/common.py @@ -269,7 +269,7 @@ def show_table(df: pd.DataFrame, download_name: str = "") -> None: return df -def show_fig(fig, download_name: str, container_width: bool = True) -> None: +def show_fig(fig, download_name: str, 
container_width: bool = True, selection_session_state_key: str = "") -> None: """ Displays a Plotly chart and adds a download button to the plot. @@ -277,32 +277,58 @@ def show_fig(fig, download_name: str, container_width: bool = True) -> None: fig (plotly.graph_objs._figure.Figure): The Plotly figure to display. download_name (str): The name for the downloaded file. container_width (bool, optional): If True, the figure will use the container width. Defaults to True. + selection_session_state_key (str, optional): If set, save the rectangular selection to session state with this key. Returns: None """ - # Display plotly chart using container width and removed controls except for download - st.plotly_chart( - fig, - use_container_width=container_width, - config={ - "displaylogo": False, - "modeBarButtonsToRemove": [ - "zoom", - "pan", - "select", - "lasso", - "zoomin", - "autoscale", - "zoomout", - "resetscale", - ], - "toImageButtonOptions": { - "filename": download_name, - "format": st.session_state["image-format"], + if not selection_session_state_key: + st.plotly_chart( + fig, + use_container_width=container_width, + config={ + "displaylogo": False, + "modeBarButtonsToRemove": [ + "zoom", + "pan", + "select", + "lasso", + "zoomin", + "autoscale", + "zoomout", + "resetscale", + ], + "toImageButtonOptions": { + "filename": download_name, + "format": st.session_state["image-format"], + }, }, - }, - ) + ) + else: + st.plotly_chart( + fig, + key=selection_session_state_key, + selection_mode=["points", "box"], + on_select="rerun", + config={ + "displaylogo": False, + "modeBarButtonsToRemove": [ + "zoom", + "pan", + "lasso", + "zoomin", + "autoscale", + "zoomout", + "resetscale", + "select" + ], + "toImageButtonOptions": { + "filename": download_name, + "format": st.session_state["image-format"], + }, + }, + use_container_width=True + ) def reset_directory(path: Path) -> None: diff --git a/src/plotting/.gitignore b/src/plotting/.gitignore new file mode 100644 index 000000000..763624ebe --- /dev/null +++ b/src/plotting/.gitignore @@ -0,0 +1 @@ +__pycache__/* \ No newline at end of file diff --git a/src/plotting/BasePlotter.py b/src/plotting/BasePlotter.py new file mode 100644 index 000000000..12a30f0c4 --- /dev/null +++ b/src/plotting/BasePlotter.py @@ -0,0 +1,58 @@ +from abc import ABC, abstractmethod +from dataclasses import dataclass +from enum import Enum +from typing import Literal, List +import numpy as np + +# A colorset suitable for color blindness +class Colors(str, Enum): + BLUE = "#4575B4" + RED = "#D73027" + LIGHTBLUE = "#91BFDB" + ORANGE = "#FC8D59" + PURPLE = "#7B2C65" + YELLOW = "#FCCF53" + DARKGRAY = "#555555" + LIGHTGRAY = "#BBBBBB" + + +@dataclass(kw_only=True) +class _BasePlotterConfig(ABC): + title: str = "1D Plot" + xlabel: str = "X-axis" + ylabel: str = "Y-axis" + height: int = 500 + width: int = 500 + relative_intensity: bool = False + show_legend: bool = True + + +# Abstract Class for Plotting +class _BasePlotter(ABC): + def __init__(self, config: _BasePlotterConfig) -> None: + self.config = config + self.fig = None # holds the figure object + + def updateConfig(self, **kwargs): + for key, value in kwargs.items(): + if hasattr(self.config, key): + setattr(self.config, key, value) + else: + raise ValueError(f"Invalid config setting: {key}") + + def _get_n_grayscale_colors(self, n: int) -> List[str]: + """Returns n evenly spaced grayscale colors in hex format.""" + hex_list = [] + for v in np.linspace(50, 200, n): + hex = "#" + for _ in range(3): + hex += 
f"{int(round(v)):02x}" + hex_list.append(hex) + return hex_list + + def plot(self, data, **kwargs): + return self._plot(data, **kwargs) + + @abstractmethod + def _plot(self, data, **kwargs): + pass \ No newline at end of file diff --git a/src/plotting/MSExperimentPlotter.py b/src/plotting/MSExperimentPlotter.py new file mode 100644 index 000000000..7cbf0fba1 --- /dev/null +++ b/src/plotting/MSExperimentPlotter.py @@ -0,0 +1,214 @@ +from dataclasses import dataclass +from typing import Literal, Union + +import matplotlib.pyplot as plt +import pandas as pd +import numpy as np +import plotly.graph_objects as go + +from .BasePlotter import Colors, _BasePlotter, _BasePlotterConfig + + +@dataclass(kw_only=True) +class MSExperimentPlotterConfig(_BasePlotterConfig): + bin_peaks: Union[Literal["auto"], bool] = "auto" + num_RT_bins: int = 50 + num_mz_bins: int = 50 + plot3D: bool = False + + +class MSExperimentPlotter(_BasePlotter): + def __init__(self, config: MSExperimentPlotterConfig, **kwargs) -> None: + """ + Initialize the MSExperimentPlotter with a given configuration and optional parameters. + + Args: + config (MSExperimentPlotterConfig): Configuration settings for the spectrum plotter. + **kwargs: Additional keyword arguments for customization. + """ + super().__init__(config=config, **kwargs) + + def _prepare_data(self, exp: pd.DataFrame) -> pd.DataFrame: + """Prepares data for plotting based on configuration (binning, relative intensity, hover text).""" + if self.config.bin_peaks == True or ( + exp.shape[0] > self.config.num_mz_bins * self.config.num_RT_bins + and self.config.bin_peaks == "auto" + ): + exp["mz"] = pd.cut(exp["mz"], bins=self.config.num_mz_bins) + exp["RT"] = pd.cut(exp["RT"], bins=self.config.num_RT_bins) + + # Group by x and y bins and calculate the mean intensity within each bin + exp = ( + exp.groupby(["mz", "RT"], observed=True) + .agg({"inty": "mean"}) + .reset_index() + ) + exp["mz"] = exp["mz"].apply(lambda interval: interval.mid).astype(float) + exp["RT"] = exp["RT"].apply(lambda interval: interval.mid).astype(float) + exp = exp.fillna(0) + else: + self.config.bin_peaks = False + + if self.config.relative_intensity: + exp["inty"] = exp["inty"] / max(exp["inty"]) * 100 + + exp["hover_text"] = exp.apply( + lambda x: f"m/z: {round(x['mz'], 6)}
<br>RT: {round(x['RT'], 2)}<br>
intensity: {int(x['inty'])}", + axis=1, + ) + + return exp.sort_values("inty") + + def _plotMatplotlib3D( + self, + exp: pd.DataFrame, + ) -> plt.Figure: + """Plot 3D peak map with mz, RT and intensity dimensions. Colored peaks based on intensity.""" + fig = plt.figure( + figsize=(self.config.width / 100, self.config.height / 100), + layout="constrained", + ) + ax = fig.add_subplot(111, projection="3d") + + if self.config.title: + ax.set_title(self.config.title, fontsize=12, loc="left") + ax.set_xlabel( + self.config.ylabel, + fontsize=9, + labelpad=-2, + color=Colors["DARKGRAY"], + style="italic", + ) + ax.set_ylabel( + self.config.xlabel, + fontsize=9, + labelpad=-2, + color=Colors["DARKGRAY"], + ) + ax.set_zlabel("intensity", fontsize=10, color=Colors["DARKGRAY"], labelpad=-2) + for axis in ("x", "y", "z"): + ax.tick_params(axis=axis, labelsize=8, pad=-2, colors=Colors["DARKGRAY"]) + + ax.set_box_aspect(aspect=None, zoom=0.88) + ax.ticklabel_format(axis="z", style="sci", useMathText=True, scilimits=(0,0)) + ax.grid(color="#FF0000", linewidth=0.8) + ax.xaxis.pane.fill = False + ax.yaxis.pane.fill = False + ax.zaxis.pane.fill = False + ax.view_init(elev=25, azim=-45, roll=0) + + # Plot lines to the bottom with colored based on inty + for i in range(len(exp)): + ax.plot( + [exp["RT"].iloc[i], exp["RT"].iloc[i]], + [exp["inty"].iloc[i], 0], + [exp["mz"].iloc[i], exp["mz"].iloc[i]], + zdir="x", + color=plt.cm.magma_r((exp["inty"].iloc[i] / exp["inty"].max())), + ) + return fig + + def _plotPlotly2D( + self, + exp: pd.DataFrame, + ) -> go.Figure: + """Plot 2D peak map with mz and RT dimensions. Colored peaks based on intensity.""" + layout = go.Layout( + title=dict(text=self.config.title), + xaxis=dict(title=self.config.xlabel), + yaxis=dict(title=self.config.ylabel), + showlegend=self.config.show_legend, + template="simple_white", + dragmode="select", + height=self.config.height, + width=self.config.width, + ) + fig = go.Figure(layout=layout) + fig.add_trace( + go.Scattergl( + name="peaks", + x=exp["RT"], + y=exp["mz"], + mode="markers", + marker=dict( + color=exp["inty"].apply(lambda x: np.log(x)), + colorscale="sunset", + size=8, + symbol="square", + colorbar=( + dict(thickness=8, outlinewidth=0, tickformat=".0e") + if self.config.show_legend + else None + ), + ), + hovertext=exp["hover_text"] if not self.config.bin_peaks else None, + hoverinfo="text", + showlegend=False, + ) + ) + return fig + + def _plot( + self, + exp: pd.DataFrame, + ) -> go.Figure: + """Prepares data and returns Plotly 2D plot or Matplotlib 3D plot.""" + exp = self._prepare_data(exp) + if self.config.plot3D: + return self._plotMatplotlib3D(exp) + return self._plotPlotly2D(exp) + +# ============================================================================= # +## FUNCTIONAL API ## +# ============================================================================= # + + +def plotMSExperiment( + exp: pd.DataFrame, + plot3D: bool = False, + relative_intensity: bool = False, + bin_peaks: Union[Literal["auto"], bool] = "auto", + num_RT_bins: int = 50, + num_mz_bins: int = 50, + width: int = 750, + height: int = 500, + title: str = "Peak Map", + xlabel: str = "RT (s)", + ylabel: str = "m/z", + show_legend: bool = False, +): + """ + Plots a Spectrum from an MSSpectrum object + + Args: + spectrum (pd.DataFrame): OpenMS MSSpectrum Object + plot3D: (bool = False, optional): Plot peak map 3D with peaks colored based on intensity. Disables colorbar legend. Works with "MATPLOTLIB" engine only. Defaults to False. 
+ relative_intensity (bool, optional): If true, plot relative intensity values. Defaults to False. + bin_peaks: (Union[Literal["auto"], bool], optional): Bin peaks to reduce complexity and improve plotting speed. Hovertext disabled if activated. If set to "auto" any MSExperiment with more then num_RT_bins x num_mz_bins peaks will be binned. Defaults to "auto". + num_RT_bins: (int, optional): Number of bins in RT dimension. Defaults to 50. + num_mz_bins: (int, optional): Number of bins in m/z dimension. Defaults to 50. + width (int, optional): Width of plot. Defaults to 500px. + height (int, optional): Height of plot. Defaults to 500px. + title (str, optional): Plot title. Defaults to "Spectrum Plot". + xlabel (str, optional): X-axis label. Defaults to "m/z". + ylabel (str, optional): Y-axis label. Defaults to "intensity" or "ion mobility". + show_legend (int, optional): Show legend. Defaults to False. + + Returns: + Plot: The generated plot using the specified engine. + """ + config = MSExperimentPlotterConfig( + plot3D=plot3D, + relative_intensity=relative_intensity, + bin_peaks=bin_peaks, + num_RT_bins=num_RT_bins, + num_mz_bins=num_mz_bins, + width=width, + height=height, + title=title, + xlabel=xlabel, + ylabel=ylabel, + show_legend=show_legend, + ) + plotter = MSExperimentPlotter(config) + return plotter.plot(exp.copy()) \ No newline at end of file diff --git a/src/view.py b/src/view.py index 770b58d20..1386b45fc 100644 --- a/src/view.py +++ b/src/view.py @@ -5,10 +5,12 @@ import plotly.graph_objects as go import streamlit as st import pyopenms as poms +from .plotting.MSExperimentPlotter import plotMSExperiment +from .common import show_fig from typing import Union -@st.cache_data + def get_df(file: Union[str, Path]) -> pd.DataFrame: """ Load a Mass Spectrometry (MS) experiment from a given mzML file and return @@ -25,131 +27,94 @@ def get_df(file: Union[str, Path]) -> pd.DataFrame: """ exp = poms.MSExperiment() poms.MzMLFile().load(str(file), exp) - df = exp.get_df() - # MSlevel for each scan - df.insert(0, "mslevel", [spec.getMSLevel() for spec in exp]) - # Precursor m/z for each scan - df.insert( - 0, - "precursormz", - [ - spec.getPrecursors()[0].getMZ() if spec.getPrecursors() else 0 - for spec in exp - ], + df_spectra = exp.get_df() + df_spectra["MS level"] = [spec.getMSLevel() for spec in exp] + precs = [] + for spec in exp: + p = spec.getPrecursors() + if p: + precs.append(p[0].getMZ()) + else: + precs.append(np.nan) + df_spectra["precursor m/z"] = precs + df_spectra["max intensity m/z"] = df_spectra.apply( + lambda x: x["mzarray"][x["intarray"].argmax()], axis=1 ) - if not df.empty: - return df - return pd.DataFrame() - - -@st.cache_resource -def plot_2D_map(df_ms1: pd.DataFrame, df_ms2: pd.DataFrame, cutoff: int) -> go.Figure: - """ - Plots a 2D peak map. 
+ if not df_spectra.empty: + st.session_state["view_spectra"] = df_spectra + else: + st.session_state["view_spectra"] = pd.DataFrame() + exp_ms2 = poms.MSExperiment() + exp_ms1 = poms.MSExperiment() + for spec in exp: + if spec.getMSLevel() == 1: + exp_ms1.addSpectrum(spec) + elif spec.getMSLevel() == 2: + exp_ms2.addSpectrum(spec) + if not exp_ms1.empty(): + st.session_state["view_ms1"] = exp_ms1.get_df(long=True) + else: + st.session_state["view_ms1"] = pd.DataFrame() + if not exp_ms2.empty(): + st.session_state["view_ms2"] = exp_ms2.get_df(long=True) + else: + st.session_state["view_ms2"] = pd.DataFrame() - This function takes two dataframes (`df_ms1` and `df_ms2`) and a cutoff value (`cutoff`) as input, and - returns a plotly Figure object containing a 2D peak map. +def plot_bpc_tic() -> go.Figure: + """Plot the base peak and total ion chromatogram (TIC). - Args: - df_ms1 (pd.DataFrame): A pandas DataFrame containing the MS1 peak information. - df_ms2 (pd.DataFrame): A pandas DataFrame containing the MS2 peak information. - cutoff (int): The cutoff threshold for the intensity filter. - - Returns - ------- - fig : plotly.graph_objs._figure.Figure - The plotly Figure object containing the 2D peak map. + Returns: + A plotly Figure object containing the BPC and TIC plot. """ fig = go.Figure() - # Get all intensities in a 1D array - ints = np.concatenate([df_ms1.loc[index, "intarray"] for index in df_ms1.index]) - # Keep intensities over cutoff threshold - int_filter = ints > cutoff - ints = ints[int_filter] - # Based on the intensity filter, filter mz and RT values as well - mzs = np.concatenate([df_ms1.loc[index, "mzarray"] for index in df_ms1.index])[ - int_filter - ] - rts = np.concatenate( - [ - np.full(len(df_ms1.loc[index, "mzarray"]), df_ms1.loc[index, "RT"]) - for index in df_ms1.index - ] - )[int_filter] - # Sort in ascending order to plot highest intensities last - sort = np.argsort(ints) - ints = ints[sort] - mzs = mzs[sort] - rts = rts[sort] - # Use Scattergl (webgl) for efficient scatter plot - fig.add_trace( - go.Scattergl( - name="peaks", - x=rts, - y=mzs, - mode="markers", - marker_color=ints, - marker_symbol="square", + if st.session_state.view_tic: + df = st.session_state.view_ms1.groupby("RT").sum().reset_index() + fig.add_scatter( + x=df["RT"], + y=df["inty"], + mode="lines", + line=dict(color="#f24c5c", width=3), # OpenMS red + name="TIC", + showlegend=True, ) - ) - # Add MS2 precursors as green markers - fig.add_trace( - go.Scattergl( - name="peaks", - x=df_ms2["RT"], - y=df_ms2["precursormz"], - mode="markers", - marker_color="#00FF00", - marker_symbol="x", + if st.session_state.view_bpc: + df = st.session_state.view_ms1.groupby("RT").max().reset_index() + fig.add_scatter( + x=df["RT"], + y=df["inty"], + mode="lines", + line=dict(color="#2d3a9d", width=3), # OpenMS blue + name="BPC", + showlegend=True, ) - ) - fig.update_layout( - xaxis_title="retention time", - yaxis_title="m/z", - plot_bgcolor="rgb(255,255,255)", - showlegend=False, - ) - fig.layout.template = "plotly_white" - # Set color scale - color_scale = [ - (0.00, "rgba(233, 233, 233, 1.0)"), - (0.01, "rgba(243, 236, 166, 1.0)"), - (0.1, "rgba(255, 168, 0, 1.0)"), - (0.2, "rgba(191, 0, 191, 1.0)"), - (0.4, "rgba(68, 0, 206, 1.0)"), - (1.0, "rgba(33, 0, 101, 1.0)"), - ] - fig.update_traces( - marker_colorscale=color_scale, - hovertext=ints.round(), - selector={"type": 'scattergl'}, - ) - return fig + if st.session_state.view_eic: + df = st.session_state.view_ms1 + target_value = 
st.session_state.view_eic_mz.strip().replace(",", ".") + try: + target_value = float(target_value) + ppm_tolerance = st.session_state.view_eic_ppm + tolerance = (target_value * ppm_tolerance) / 1e6 + # Filter the DataFrame + df_eic = df[(df['mz'] >= target_value - tolerance) & (df['mz'] <= target_value + tolerance)] + if not df_eic.empty: + fig.add_scatter( + x=df_eic["RT"], + y=df_eic["inty"], + mode="lines", + line=dict(color="#f6bf26", width=3), + name="XIC", + showlegend=True, + ) + except: + st.error("Invalid m/z value.") -@st.cache_resource -def plot_bpc(df: pd.DataFrame) -> go.Figure: - """Plot the base peak chromatogram (BPC) from a given dataframe. - - Args: - df: A pandas DataFrame containing the data to be plotted. The DataFrame should - contain columns named 'RT' and 'intarray', representing the retention time - and intensity values, respectively, for each data point. - - Returns: - A plotly Figure object containing the BPC plot. - """ - intensity = np.array([max(intensity_array) for intensity_array in df["intarray"]]) - fig = px.line(df, x="RT", y=intensity) - fig.update_traces(line_color="#555FF5", line_width=3) - fig.update_traces(showlegend=False) fig.update_layout( - showlegend=False, - # title_text="base peak chromatogram (BPC)", + title=f"{st.session_state.view_selected_file}", xaxis_title="retention time (s)", - yaxis_title="intensity (cps)", + yaxis_title="intensity", plot_bgcolor="rgb(255,255,255)", - width=1000, + height=500, ) fig.layout.template = "plotly_white" return fig @@ -179,13 +144,147 @@ def create_spectra(x, y, zero=0): df = create_spectra(spec["mzarray"], spec["intarray"]) fig = px.line(df, x="mz", y="intensity") fig.update_traces(line_color=color) + fig.add_hline(0, line=dict(color="#DDDDDD"), line_width=3) fig.update_layout( showlegend=False, title_text=title, xaxis_title="m/z", yaxis_title="intensity", plot_bgcolor="rgb(255,255,255)", + dragmode="select", ) + # add annotations + top_indices = np.argsort(spec["intarray"])[-5:][::-1] + for index in top_indices: + mz = spec["mzarray"][index] + i = spec["intarray"][index] + fig.add_annotation( + dict( + x=mz, + y=i, + text=str(round(mz, 5)), + showarrow=False, + xanchor="left", + font=dict( + family="Open Sans Mono, monospace", + size=12, + color=color, + ), + ) + ) fig.layout.template = "plotly_white" - fig.update_yaxes(fixedrange=True) + # adjust x-axis limits to not cut peaks and annotations + x_values = [trace.x for trace in fig.data] + xmin = min([min(values) for values in x_values]) + xmax = max([max(values) for values in x_values]) + padding = 0.15 * (xmax - xmin) + fig.update_layout( + xaxis_range=[ + xmin - padding, + xmax + padding, + ] + ) return fig + + +@st.experimental_fragment +def view_peak_map(): + df = st.session_state.view_ms1 + if "view_peak_map_selection" in st.session_state: + box = st.session_state.view_peak_map_selection.selection.box + if box: + df = st.session_state.view_ms1.copy() + df = df[df["RT"] > box[0]["x"][0]] + df = df[df["mz"] > box[0]["y"][1]] + df = df[df["mz"] < box[0]["y"][0]] + df = df[df["RT"] < box[0]["x"][1]] + peak_map = plotMSExperiment( + df, plot3D=False, title=st.session_state.view_selected_file + ) + c1, c2 = st.columns(2) + with c1: + st.info( + "πŸ’‘ Zoom in via rectangular selection for more details and 3D plot. Double click plot to zoom back out." 
+ ) + show_fig( + peak_map, + f"peak_map_{st.session_state.view_selected_file}", + selection_session_state_key="view_peak_map_selection", + ) + with c2: + if df.shape[0] < 2500: + peak_map_3D = plotMSExperiment(df, plot3D=True, title="") + st.pyplot(peak_map_3D, use_container_width=True) + + +@st.experimental_fragment +def view_spectrum(): + cols = st.columns([0.34, 0.66]) + with cols[0]: + df = st.session_state.view_spectra.copy() + df["spectrum ID"] = df.index + 1 + event = st.dataframe( + df, + column_order=[ + "spectrum ID", + "RT", + "MS level", + "max intensity m/z", + "precursor m/z", + ], + selection_mode="single-row", + on_select="rerun", + use_container_width=True, + hide_index=True, + ) + rows = event.selection.rows + with cols[1]: + if rows: + df = st.session_state.view_spectra.iloc[rows[0]] + if "view_spectrum_selection" in st.session_state: + box = st.session_state.view_spectrum_selection.selection.box + if box: + mz_min, mz_max = sorted(box[0]["x"]) + mask = (df["mzarray"] > mz_min) & (df["mzarray"] < mz_max) + df["intarray"] = df["intarray"][mask] + df["mzarray"] = df["mzarray"][mask] + + if df["mzarray"].size > 0: + title = f"{st.session_state.view_selected_file} spec={rows[0]+1} mslevel={df['MS level']}" + if df["precursor m/z"] > 0: + title += f" precursor m/z: {round(df['precursor m/z'], 4)}" + fig = plot_ms_spectrum(df, title, "#2d3a9d") + show_fig(fig, title.replace(" ", "_"), True, "view_spectrum_selection") + else: + st.session_state.pop("view_spectrum_selection") + st.rerun() + else: + st.info("πŸ’‘ Select rows in the spectrum table to display plot.") + + +@st.experimental_fragment() +def view_bpc_tic(): + cols = st.columns(5) + cols[0].checkbox( + "Total Ion Chromatogram (TIC)", True, key="view_tic", help="Plot TIC." + ) + cols[1].checkbox( + "Base Peak Chromatogram (BPC)", True, key="view_bpc", help="Plot BPC." + ) + cols[2].checkbox( + "Extracted Ion Chromatogram (EIC/XIC)", True, key="view_eic", help="Plot extracted ion chromatogram with specified m/z." + ) + cols[3].text_input( + "XIC m/z", + "235.1189", + help="m/z for XIC calculation.", + key="view_eic_mz", + ) + cols[4].number_input( + "XIC ppm tolerance", + 0.1, 50.0, 10.0, 1.0, + help="Tolerance for XIC calculation (ppm).", + key="view_eic_ppm" + ) + fig = plot_bpc_tic() + show_fig(fig, f"BPC-TIC-{st.session_state.view_selected_file}") diff --git a/win_exe_with_pyinstaller.md b/win_exe_with_pyinstaller.md index 729c15888..e66eab07b 100644 --- a/win_exe_with_pyinstaller.md +++ b/win_exe_with_pyinstaller.md @@ -63,7 +63,6 @@ like: \hooks\hook-streamlit.py
from PyInstaller.utils.hooks import copy_metadata datas = [] datas += copy_metadata('streamlit') -datas += copy_metadata('streamlit_plotly_events') datas += copy_metadata('pyopenms') # can add new package e-g datas += copy_metadata('captcha') @@ -112,7 +111,6 @@ datas=[ ("myenv/Lib/site-packages/altair/vegalite/v4/schema/vega-lite-schema.json","./altair/vegalite/v4/schema/"), ("myenv/Lib/site-packages/streamlit/static", "./streamlit/static"), ("myenv/Lib/site-packages/streamlit/runtime", "./streamlit/runtime"), - ("myenv/Lib/site-packages/streamlit_plotly_events", "./streamlit_plotly_events/"), ("myenv/Lib/site-packages/pyopenms", "./pyopenms/"), # Add new datas e-g we add in hook captcha ("myenv/Lib/site-packages/captcha", "./captcha/") From a55181867c1ca10715d709aa776a8fe4ae8d2e67 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Tue, 9 Jul 2024 11:39:17 +0200 Subject: [PATCH 03/24] move captcha control to page_setup --- app.py | 64 +++++++------------- "pages/10_\360\237\223\201_File_Upload.py" | 6 -- "pages/11_\360\237\221\200_View_Raw_Data.py" | 24 +++----- pages/12_Simple_Workflow.py | 10 +-- pages/13_Run_subprocess.py | 6 -- pages/15_Workflow_with_mzML_files.py | 7 --- src/common.py | 9 +++ 7 files changed, 43 insertions(+), 83 deletions(-) diff --git a/app.py b/app.py index 738de4e41..5d529be4b 100644 --- a/app.py +++ b/app.py @@ -18,27 +18,23 @@ None """ -import sys - from pathlib import Path import streamlit as st -from src.captcha_ import captcha_control from src.common import page_setup, save_params params = page_setup(page="main") +st.title("OpenMS Streamlit Template App") +st.markdown(""" +This repository contains a template app for OpenMS workflows in a web application using the **streamlit** framework. + +It serves as a foundation for apps ranging from simple workflows with **pyOpenMS** to complex workflows utilizing **OpenMS TOPP tools** with parallel execution. -def main(): - """ - Display main page content. - """ - st.title("OpenMS Streamlit Template App") - st.info(""" -This repository contains a template app for OpenMS workflows in a web application using the **streamlit** framework. It serves as a foundation for apps ranging from simple workflows with **pyOpenMS** to complex workflows utilizing **OpenMS TOPP tools** with parallel execution. It includes solutions for handling user data and parameters in workspaces as well as deployment with docker-compose. +It includes solutions for handling user data and parameters in workspaces as well as deployment with docker-compose. """) - st.subheader("Features") - st.markdown(""" +st.subheader("Features") +st.markdown(""" - Workspaces for user data with unique shareable IDs - Persistent parameters and input files within a workspace - Captcha control @@ -46,42 +42,26 @@ def main(): - framework for workflows with OpenMS TOPP tools - Deployment [with docker-compose](https://github.com/OpenMS/streamlit-deployment) """) - st.subheader("Quick Start") - if Path("OpenMS-App.zip").exists(): - st.markdown(""" +st.subheader("Quick Start") +if Path("OpenMS-App.zip").exists(): + st.markdown(""" Download the latest version for Windows here by clicking the button below. 
""") - with open("OpenMS-App.zip", "rb") as file: - st.download_button( - label="Download for Windows", - data=file, - file_name="OpenMS-App.zip", - mime="archive/zip", - type="primary", - ) - st.markdown(""" + with open("OpenMS-App.zip", "rb") as file: + st.download_button( + label="Download for Windows", + data=file, + file_name="OpenMS-App.zip", + mime="archive/zip", + type="primary", + ) + st.markdown(""" Extract the zip file and run the executable (.exe) file to launch the app. Since every dependency is compressed and packacked the app will take a while to launch (up to one minute). """) - st.markdown(""" +st.markdown(""" Check out the documentation for **users** and **developers** is included as pages indicated by the πŸ“– icon Try the example pages **πŸ“ mzML file upload**, **πŸ‘€ visualization** and **example workflows**. """) - save_params(params) - - -# Check if the script is run in local mode (e.g., "streamlit run app.py local") -if "local" in sys.argv: - # In local mode, run the main function without applying captcha - main() - -# If not in local mode, assume it's hosted/online mode -else: - # WORK LIKE MULTIPAGE APP - if "controllo" not in st.session_state or st.session_state["controllo"] is False: - # Apply captcha control to verify the user - captcha_control() - else: - # Run the main function - main() +save_params(params) \ No newline at end of file diff --git "a/pages/10_\360\237\223\201_File_Upload.py" "b/pages/10_\360\237\223\201_File_Upload.py" index e30fa2f70..ec5892cab 100755 --- "a/pages/10_\360\237\223\201_File_Upload.py" +++ "b/pages/10_\360\237\223\201_File_Upload.py" @@ -3,17 +3,11 @@ import streamlit as st import pandas as pd -from src.captcha_ import captcha_control from src.common import page_setup, save_params, v_space, show_table from src import fileupload params = page_setup() -# If run in hosted mode, show captcha as long as it has not been solved -if "controllo" not in st.session_state or params["controllo"] is False: - # Apply captcha by calling the captcha_control function - captcha_control() - st.title("File Upload") tabs = ["File Upload", "Example Data"] diff --git "a/pages/11_\360\237\221\200_View_Raw_Data.py" "b/pages/11_\360\237\221\200_View_Raw_Data.py" index f2ae9c2fa..fad276d17 100755 --- "a/pages/11_\360\237\221\200_View_Raw_Data.py" +++ "b/pages/11_\360\237\221\200_View_Raw_Data.py" @@ -4,16 +4,10 @@ from src.common import page_setup from src import view -from src.captcha_ import captcha_control params = page_setup() -# If run in hosted mode, show captcha as long as it has not been solved -if "controllo" not in st.session_state or params["controllo"] is False: - # Apply captcha by calling the captcha_control function - captcha_control() - st.title("View raw MS data") # File selection can not be in fragment since it influences the subsequent sections @@ -27,12 +21,12 @@ view.get_df(Path(st.session_state.workspace, "mzML-files", selected_file)) -tabs = st.tabs( - ["πŸ“ˆ Peak map (MS1)", "πŸ“ˆ Spectra (MS1 + MS2)", "πŸ“ˆ Chromatograms (MS1)"] -) -with tabs[0]: - view.view_peak_map() -with tabs[1]: - view.view_spectrum() -with tabs[2]: - view.view_bpc_tic() + tabs = st.tabs( + ["πŸ“ˆ Peak map (MS1)", "πŸ“ˆ Spectra (MS1 + MS2)", "πŸ“ˆ Chromatograms (MS1)"] + ) + with tabs[0]: + view.view_peak_map() + with tabs[1]: + view.view_spectrum() + with tabs[2]: + view.view_bpc_tic() diff --git a/pages/12_Simple_Workflow.py b/pages/12_Simple_Workflow.py index ec5544555..8084d85bc 100755 --- a/pages/12_Simple_Workflow.py +++ b/pages/12_Simple_Workflow.py @@ 
-2,16 +2,10 @@ from src.common import page_setup, save_params, show_table from src import simpleworkflow -from src.captcha_ import captcha_control # Page name "workflow" will show mzML file selector in sidebar params = page_setup() -# If run in hosted mode, show captcha as long as it has not been solved -if "controllo" not in st.session_state or params["controllo"] is False: - # Apply captcha by calling the captcha_control function - captcha_control() - st.title("Simple Workflow") st.markdown("Example for a simple workflow with quick execution times.") @@ -40,7 +34,9 @@ # Get a dataframe with x and y dimensions via time consuming (sleep) cached function # If the input has been given before, the function does not run again # Input x from local variable, input y from session state via key -df = simpleworkflow.generate_random_table(xdimension, st.session_state["example-y-dimension"]) +df = simpleworkflow.generate_random_table( + xdimension, st.session_state["example-y-dimension"] +) # Display dataframe via custom show_table function, which will render a download button as well show_table(df, download_name="random-table") diff --git a/pages/13_Run_subprocess.py b/pages/13_Run_subprocess.py index 30dd264f8..8b3c01c21 100644 --- a/pages/13_Run_subprocess.py +++ b/pages/13_Run_subprocess.py @@ -5,17 +5,11 @@ from pathlib import Path from src.common import page_setup, save_params -from src.captcha_ import captcha_control from src.run_subprocess import run_subprocess # Page name "workflow" will show mzML file selector in sidebar params = page_setup() -# If run in hosted mode, show captcha as long as it has not been solved -if "controllo" not in st.session_state or params["controllo"] is False: - # Apply captcha by calling the captcha_control function - captcha_control() - st.title("Run subprocess") st.markdown( """ diff --git a/pages/15_Workflow_with_mzML_files.py b/pages/15_Workflow_with_mzML_files.py index 7bbaf5237..a27402334 100755 --- a/pages/15_Workflow_with_mzML_files.py +++ b/pages/15_Workflow_with_mzML_files.py @@ -6,17 +6,10 @@ from src.common import page_setup, save_params, show_fig, show_table from src import mzmlfileworkflow -from src.captcha_ import captcha_control - # Page name "workflow" will show mzML file selector in sidebar params = page_setup() -# If run in hosted mode, show captcha as long as it has not been solved -if "controllo" not in st.session_state or params["controllo"] is False: - # Apply captcha by calling the captcha_control function - captcha_control() - st.title("Workflow") st.markdown( """ diff --git a/src/common.py b/src/common.py index 87c294dbb..edb26b247 100644 --- a/src/common.py +++ b/src/common.py @@ -9,6 +9,8 @@ import streamlit as st import pandas as pd +from .captcha_ import captcha_control + # set these variables according to your project APP_NAME = "OpenMS Streamlit App" REPOSITORY_NAME = "streamlit-template" @@ -127,6 +129,13 @@ def page_setup(page: str = "") -> dict[str, Any]: # Render the sidebar params = render_sidebar(page) + + # If run in hosted mode, show captcha as long as it has not been solved + if not "local" in sys.argv: + if "controllo" not in st.session_state or params["controllo"] is False: + # Apply captcha by calling the captcha_control function + captcha_control() + return params From 694becbd4077129e6d122141a2e8a62fe1b65957 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Tue, 9 Jul 2024 14:08:23 +0200 Subject: [PATCH 04/24] docs in one page --- app.py | 8 +- pages/0_Documentation.py | 594 ++++++++++++++++++ 
"pages/0_\360\237\223\226_Installation.py" | 61 -- "pages/1_\360\237\223\226_User_Guide.py" | 52 -- "pages/2_\360\237\223\226_Build_App.py" | 78 --- ...60\237\223\226_TOPP_Workflow_Framework.py" | 254 -------- .../4_\360\237\223\226_Windows_executable.py" | 78 --- "pages/5_\360\237\223\226_Deployment.py" | 15 - 8 files changed, 597 insertions(+), 543 deletions(-) create mode 100644 pages/0_Documentation.py delete mode 100644 "pages/0_\360\237\223\226_Installation.py" delete mode 100644 "pages/1_\360\237\223\226_User_Guide.py" delete mode 100644 "pages/2_\360\237\223\226_Build_App.py" delete mode 100644 "pages/3_\360\237\223\226_TOPP_Workflow_Framework.py" delete mode 100644 "pages/4_\360\237\223\226_Windows_executable.py" delete mode 100644 "pages/5_\360\237\223\226_Deployment.py" diff --git a/app.py b/app.py index 5d529be4b..ace5767d3 100644 --- a/app.py +++ b/app.py @@ -21,9 +21,9 @@ from pathlib import Path import streamlit as st -from src.common import page_setup, save_params +from src.common import page_setup -params = page_setup(page="main") +page_setup(page="main") st.title("OpenMS Streamlit Template App") st.markdown(""" @@ -62,6 +62,4 @@ Check out the documentation for **users** and **developers** is included as pages indicated by the πŸ“– icon Try the example pages **πŸ“ mzML file upload**, **πŸ‘€ visualization** and **example workflows**. -""") - -save_params(params) \ No newline at end of file +""") \ No newline at end of file diff --git a/pages/0_Documentation.py b/pages/0_Documentation.py new file mode 100644 index 000000000..436e45929 --- /dev/null +++ b/pages/0_Documentation.py @@ -0,0 +1,594 @@ +import streamlit as st +from src.Workflow import Workflow +from src.workflow.StreamlitUI import StreamlitUI +from src.workflow.FileManager import FileManager +from src.workflow.CommandExecutor import CommandExecutor +from src.common import page_setup +from inspect import getsource +from pathlib import Path +import requests + +page_setup() + + +st.title("πŸ“– Documentation") + +cols = st.columns(2) + +pages = [ + "User Guide", + "Installation", + "Developers Guide: How to build app based on this template", + "Developers Guide: TOPP Workflow Framework", + "Developer Guide: Windows Executables", + "Developers Guide: Deployment", +] +page = cols[0].selectbox( + "**Content**", + pages, +) + +############################################################################################# +# User Guide +############################################################################################# + +if page == pages[0]: + st.markdown( + """ +# User Guide + +Welcome to the OpenMS Streamlit Web Application! This guide will help you understand how to use our tools effectively. + +## Advantages of OpenMS Web Apps + +OpenMS web applications provide a user-friendly interface for accessing the powerful features of OpenMS. Here are a few advantages: +- **Accessibility**: Access powerful OpenMS algorithms and TOPP tools from any device with a web browser. +- **Ease of Use**: Simplified user interface makes it easy for both beginners and experts to perform complex analyses. +- **No Installation Required**: Use the tools without the need to install OpenMS locally, saving time and system resources. + +## Workspaces + +In the OpenMS web application, workspaces are designed to keep your analysis organized: +- **Workspace Specific Parameters and Files**: Each workspace stores parameters and files (uploaded input files and results from workflows). 
+- **Persistence**: Your workspaces and parameters are saved, so you can return to your analysis anytime and pick up where you left off. + +## Online and Local Mode Differences + +There are a few key differences between operating in online and local modes: +- **File Uploads**: + - *Online Mode*: You can upload only one file at a time. This helps manage server load and optimizes performance. + - *Local Mode*: Multiple file uploads are supported, giving you flexibility when working with large datasets. +- **Workspace Access**: + - In online mode, workspaces are stored temporarily and will be cleared after seven days of inactivity. + - In local mode, workspaces are saved on your local machine, allowing for persistent storage. + +## Downloading Results + +You can download the results of your analyses, including figures and tables, directly from the application: +- **Figures**: Click the camera icon button, appearing while hovering on the top right corner of the figure. Set the desired image format in the settings panel in the side bar. +- **Tables**: Use the download button to save tables in *csv* format, appearing while hovering on the top right corner of the table. + +## Getting Started + +To get started: +1. Select or create a new workspace. +2. Upload your data file. +3. Set the necessary parameters for your analysis. +4. Run the analysis. +5. View and download your results. + +For more detailed information on each step, refer to the specific sections of this guide. +""" + ) + +############################################################################################# +# Installation +############################################################################################# + +if page == pages[1]: + if Path("OpenMS-App.zip").exists(): + st.markdown( + """ +Download the latest version for **Windows** here clicking the button below. +""" + ) + with open("OpenMS-App.zip", "rb") as file: + st.download_button( + label="Download for Windows", + data=file, + file_name="OpenMS-App.zip", + mime="archive/zip", + type="primary", + ) + + st.markdown( + """ +# Installation + +## Windows + +The app is available as pre-packaged Windows executable, including all dependencies. + +The windows executable is built by a GitHub action and can be downloaded [here](https://github.com/OpenMS/streamlit-template/actions/workflows/build-windows-executable-app.yaml). +Select the latest successfull run and download the zip file from the artifacts section, while signed in to GitHub. + +## Python + +Clone the [streamlit-template repository](https://github.com/OpenMS/streamlit-template). It includes files to install dependencies via pip or conda. + +### via pip in an existing Python environment + +To install all required depdencies via pip in an already existing Python environment, run the following command in the terminal: + +`pip install -r requirements.txt` + +### create new environment via conda/mamba + +Create and activate the conda environment: + +`conda env create -f environment.yml` + +`conda activate streamlit-env` + +### run the app + +Run the app via streamlit command in the terminal with or without *local* mode (default is *online* mode). Learn more about *local* and *online* mode in the documentation page πŸ“– **OpenMS Template App**. + +`streamlit run app.py [local]` + +## Docker + +This repository contains two Dockerfiles. + +1. `Dockerfile`: This Dockerfile builds all dependencies for the app including Python packages and the OpenMS TOPP tools. 
Recommended for more complex workflows where you want to use the OpenMS TOPP tools for instance with the **TOPP Workflow Framework**. +2. `Dockerfile_simple`: This Dockerfile builds only the Python packages. Recommended for simple apps using pyOpenMS only. + +""" + ) + +############################################################################################# +# Developer Overview, how to build app based on Template +############################################################################################# + +if page == pages[2]: + st.markdown( + """ +# Build your own app based on this template + +## App layout + +- *Main page* contains explanatory text on how to use the app and a workspace selector. `app.py` +- *Pages* can be navigated via *Sidebar*. Sidebar also contains the OpenMS logo, settings panel and a workspace indicator. The *main page* contains a workspace selector as well. +- See *pages* in the template app for example use cases. The content of this app serves as a documentation. + +## Key concepts + +- **Workspaces** +: Directories where all data is generated and uploaded can be stored as well as a workspace specific parameter file. +- **Run the app locally and online** +: Launching the app with the `local` argument lets the user create/remove workspaces. In the online the user gets a workspace with a specific ID. +- **Parameters** +: Parameters (defaults in `assets/default-params.json`) store changing parameters for each workspace. Parameters are loaded via the page_setup function at the start of each page. To track a widget variable via parameters simply give them a key and add a matching entry in the default parameters file. Initialize a widget value from the params dictionary. + +```python +params = page_setup() + +st.number_input(label="x dimension", min_value=1, max_value=20, +value=params["example-y-dimension"], step=1, key="example-y-dimension") + +save_params() +``` + +## Code structure + +- **Pages** must be placed in the `pages` directory. +- It is recommended to use a separate file for defining functions per page in the `src` directory. +- The `src/common.py` file contains a set of useful functions for common use (e.g. rendering a table with download button). + +## Modify the template to build your own app + +1. In `src/common.py`, update the name of your app and the repository name + ```python + APP_NAME = "OpenMS Streamlit App" + REPOSITORY_NAME = "streamlit-template" + ``` +2. In `clean-up-workspaces.py`, update the name of the workspaces directory to `/workspaces-` + ```python + workspaces_directory = Path("/workspaces-streamlit-template") + ``` +3. Update `README.md` accordingly + + +**Dockerfile-related** +1. Choose one of the Dockerfiles depending on your use case: + - `Dockerfile` builds OpenMS including TOPP tools + - `Dockerfile_simple` uses pyOpenMS only +2. Update the Dockerfile: + - with the `GITHUB_USER` owning the Streamlit app repository + - with the `GITHUB_REPO` name of the Streamlit app repository + - if your main page Python file is not called `app.py`, modify the following line + ```dockerfile + RUN echo "mamba run --no-capture-output -n streamlit-env streamlit run app.py" >> /app/entrypoint.sh + ``` +3. Update Python package dependency files: + - `requirements.txt` if using `Dockerfile_simple` + - `environment.yml` if using `Dockerfile` + +## How to build a workflow + +### Simple workflow using pyOpenMS + +Take a look at the example pages `Simple Workflow` or `Workflow with mzML files` for examples (on the *sidebar*). 
Put Streamlit logic inside the pages and call the functions with workflow logic from from the `src` directory (for our examples `src/simple_workflow.py` and `src/mzmlfileworkflow.py`). + +### Complex workflow using TOPP tools + +This template app features a module in `src/workflow` that allows for complex and long workflows to be built very efficiently. Check out the `TOPP Workflow Framework` page for more information (on the *sidebar*). +""" + ) + +############################################################################################# +# TOPP Workflow Framework +############################################################################################# + +if page == pages[3]: + wf = Workflow() + + st.title("TOPP Workflow Framework Documentation") + + st.markdown( + """ +## Features + +- streamlined methods for uploading files, setting parameters, and executing workflows +- automatic parameter handling +- quickly build parameter interface for TOPP tools with all parameters from *ini* files +- automatically create a log file for each workflow run with stdout and stderr +- workflow output updates automatically in short intervalls +- user can leave the app and return to the running workflow at any time +- quickly build a workflow with multiple steps channelling files between steps +""" + ) + + st.markdown( + """ +## Quickstart + +This repository contains a module in `src/workflow` that provides a framework for building and running analysis workflows. + +The `WorkflowManager` class provides the core workflow logic. It uses the `Logger`, `FileManager`, `ParameterManager`, and `CommandExecutor` classes to setup a complete workflow logic. + +To build your own workflow edit the file `src/TOPPWorkflow.py`. Use any streamlit components such as tabs (as shown in example), columns, or even expanders to organize the helper functions for displaying file upload and parameter widgets. + +> πŸ’‘ Simply set a name for the workflow and overwrite the **`upload`**, **`configure`**, **`execution`** and **`results`** methods in your **`Workflow`** class. + +The file `pages/6_TOPP-Workflow.py` displays the workflow content and can, but does not have to be modified. + +The `Workflow` class contains four important members, which you can use to build your own workflow: + +> **`self.params`:** dictionary of parameters stored in a JSON file in the workflow directory. Parameter handling is done automatically. Default values are defined in input widgets and non-default values are stored in the JSON file. + +> **`self.ui`:** object of type `StreamlitUI` contains helper functions for building the parameter and file upload widgets. + +> **`self.executor`:** object of type `CommandExecutor` can be used to run any command line tool alone or in parallel and includes a convenient method for running TOPP tools. + +> **`self.logger`:** object of type `Logger` to write any output to a log file during workflow execution. + +> **`self.file_manager`:** object of type `FileManager` to handle file types and creation of output directories. +""" + ) + + with st.expander("**Complete example for custom Workflow class**", expanded=False): + st.code(getsource(Workflow)) + + st.markdown( + """ +## File Upload + +All input files for the workflow will be stored within the workflow directory in the subdirectory `input-files` within it's own subdirectory for the file type. + +The subdirectory name will be determined by a **key** that is defined in the `self.ui.upload_widget` method. 
The uploaded files are available by the specific key for parameter input widgets and accessible while building the workflow. + +Calling this method will create a complete file upload widget section with the following components: + +- file uploader +- list of currently uploaded files with this key (or a warning if there are none) +- button to delete all files + +Fallback files(s) can be specified, which will be used if the user doesn't upload any files. This can be useful for example for database files where a default is provided. +""" + ) + + st.code(getsource(Workflow.upload)) + + st.info( + "πŸ’‘ Use the same **key** for parameter widgets, to select which of the uploaded files to use for analysis." + ) + + with st.expander("**Code documentation:**", expanded=True): + st.help(StreamlitUI.upload_widget) + + st.markdown( + """ +## Parameter Input + +The paramter section is already pre-defined as a form with buttons to **save parameters** and **load defaults** and a toggle to show TOPP tool parameters marked as advanced. + +Generating parameter input widgets is done with the `self.ui.input` method for any parameter and the `self.ui.input_TOPP` method for TOPP tools. + +**1. Choose `self.ui.input_widget` for any paramter not-related to a TOPP tool or `self.ui.select_input_file` for any input file:** + +It takes the obligatory **key** parameter. The key is used to access the parameter value in the workflow parameters dictionary `self.params`. Default values do not need to be specified in a separate file. Instead they are determined from the widgets default value automatically. Widget types can be specified or automatically determined from **default** and **options** parameters. It's suggested to add a **help** text and other parameters for numerical input. + +Make sure to match the **key** of the upload widget when calling `self.ui.input_TOPP`. + +**2. Choose `self.ui.input_TOPP` to automatically generate complete input sections for a TOPP tool:** + +It takes the obligatory **topp_tool_name** parameter and generates input widgets for each parameter present in the **ini** file (automatically created) except for input and output file parameters. For all input file parameters a widget needs to be created with `self.ui.select_input_file` with an appropriate **key**. For TOPP tool parameters only non-default values are stored. + +**3. Choose `self.ui.input_python` to automatically generate complete input sections for a custom Python tool:** + +Takes the obligatory **script_file** argument. The default location for the Python script files is in `src/python-tools` (in this case the `.py` file extension is optional in the **script_file** argument), however, any other path can be specified as well. Parameters need to be specified in the Python script in the **DEFAULTS** variable with the mandatory **key** and **value** parameters. +""" + ) + + with st.expander( + "Options to use as dictionary keys for parameter definitions (see `src/python-tools/example.py` for an example)" + ): + st.markdown( + """ +**Mandatory** keys for each parameter +- *key:* a unique identifier +- *value:* the default value + +**Optional** keys for each parameter +- *name:* the name of the parameter +- *hide:* don't show the parameter in the parameter section (e.g. 
for **input/output files**) +- *options:* a list of valid options for the parameter +- *min:* the minimum value for the parameter (int and float) +- *max:* the maximum value for the parameter (int and float) +- *step_size:* the step size for the parameter (int and float) +- *help:* a description of the parameter +- *widget_type:* the type of widget to use for the parameter (default: auto) +- *advanced:* whether or not the parameter is advanced (default: False) +""" + ) + + st.code(getsource(Workflow.configure)) + st.info( + "πŸ’‘ Access parameter widget values by their **key** in the `self.params` object, e.g. `self.params['mzML-files']` will give all selected mzML files." + ) + + with st.expander("**Code documentation**", expanded=True): + st.help(StreamlitUI.input_widget) + st.help(StreamlitUI.select_input_file) + st.help(StreamlitUI.input_TOPP) + st.help(StreamlitUI.input_python) + st.markdown( + """ +## Building the Workflow + +Building the workflow involves **calling all (TOPP) tools** using **`self.executor`** with **input and output files** based on the **`FileManager`** class. For TOPP tools non-input-output parameters are handled automatically. Parameters for other processes and workflow logic can be accessed via widget keys (set in the parameter section) in the **`self.params`** dictionary. + +### FileManager + +The `FileManager` class serves as an interface for unified input and output files with useful functionality specific to building workflows, such as **setting a (new) file type** and **subdirectory in the workflows result directory**. + +Use the **`get_files`** method to get a list of all file paths as strings. + +Optionally set the following parameters modify the files: + +- **set_file_type** (str): set new file types and result subdirectory. +- **set_results_dir** (str): set a new subdirectory in the workflows result directory. +- **collect** (bool): collect all files into a single list. Will return a list with a single entry, which is a list of all files. Useful to pass to tools which can handle multiple input files at once. +""" + ) + + st.code( + """ +# Get all file paths as strings from self.param entry. +mzML_files = self.file_manager.get_files(self.params["mzML-files]) +# mzML_files = ['../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Control.mzML', '../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Treatment.mzML'] + +# Creating output files for a TOPP tool, setting a new file type and result subdirectory name. +feature_detection_out = self.file_manager.get_files(mzML_files, set_file_type="featureXML", set_results_dir="feature-detection") +# feature_detection_out = ['../workspaces-streamlit-template/default/topp-workflow/results/feature-detection/Control.featureXML', '../workspaces-streamlit-template/default/topp-workflow/results/feature-detection/Treatment.featureXML'] + +# Setting a name for the output directory automatically (useful if you never plan to access these files in the results section). +feature_detection_out = self.file_manager.get_files(mzML_files, set_file_type="featureXML", set_results_dir="auto") +# feature_detection_out = ['../workspaces-streamlit-template/default/topp-workflow/results/6DUd/Control.featureXML', '../workspaces-streamlit-template/default/topp-workflow/results/6DUd/Treatment.featureXML'] + +# Combining all mzML files to be passed to a TOPP tool in a single run. Using "collected" files as argument for self.file_manager.get_files will "un-collect" them. 
+mzML_files = self.file_manager.get_files(mzML_files, collect=True)
+# mzML_files = [['../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Control.mzML', '../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Treatment.mzML']]
+    """
+    )
+
+    with st.expander("**Code documentation**", expanded=True):
+        st.help(FileManager.get_files)
+
+    st.markdown(
+        """
+### Running commands
+
+It is possible to execute any command line command using the **`self.executor`** object, either a single command or a list of commands in parallel. Furthermore, a method to run TOPP tools is included.
+
+**1. Single command**
+
+The `self.executor.run_command` method takes a single command as input and optionally logs stdout and stderr to the workflow log (default True).
+"""
+    )
+
+    st.code(
+        """
+self.executor.run_command(["command", "arg1", "arg2", ...])
+"""
+    )
+
+    st.markdown(
+        """
+**2. Run multiple commands in parallel**
+
+The `self.executor.run_multiple_commands` method takes a list of commands as inputs.
+
+**3. Run TOPP tools**
+
+The `self.executor.run_topp` method takes a TOPP tool name and a dictionary of input and output files as input. The **keys** need to match the actual input and output parameter names of the TOPP tool. The **values** should be of type `FileManager`. All other **non-default parameters (from input widgets)** will be passed to the TOPP tool automatically.
+
+Depending on the number of input files, the TOPP tool will be run either in parallel or in a single run (using **`FileManager.collect`**).
+"""
+    )
+
+    st.info(
+        """πŸ’‘ **Input and output file order**
+
+In many tools, a single input file is processed to produce a single output file.
+When dealing with lists of input or output files, the convention is that
+files are paired based on their order. For instance, the n-th input file is
+assumed to correspond to the n-th output file, maintaining a structured
+relationship between input and output data.
+"""
+    )
+    st.code(
+        """
+# e.g. FeatureFinderMetabo takes single input files
+in_files = self.file_manager.get_files(["sample1.mzML", "sample2.mzML"])
+out_files = self.file_manager.get_files(in_files, set_file_type="featureXML", set_results_dir="feature-detection")
+
+# Run FeatureFinderMetabo tool with input and output files in parallel for each pair of input/output files.
+self.executor.run_topp("FeatureFinderMetabo", input_output={"in": in_files, "out": out_files})
+# FeatureFinderMetabo -in sample1.mzML -out workspace-dir/results/feature-detection/sample1.featureXML
+# FeatureFinderMetabo -in sample2.mzML -out workspace-dir/results/feature-detection/sample2.featureXML
+
+# Run SiriusExport tool with multiple input and output files.
+out_se = self.file_manager.get_files("sirius.ms", set_results_dir="sirius-export")
+self.executor.run_topp("SiriusExport", {"in": self.file_manager.get_files(in_files, collect=True),
+                                        "in_featureinfo": self.file_manager.get_files(out_files, collect=True),
+                                        "out": out_se})
+# SiriusExport -in sample1.mzML sample2.mzML -in_featureinfo sample1.featureXML sample2.featureXML -out sirius.ms
+    """
+    )
+
+    st.markdown(
+        """
+**4. Run custom Python scripts**
+
+Sometimes it is useful to run custom Python scripts, for example, for extra functionality which is not included in a TOPP tool.
+
+`self.executor.run_python` works similarly to `self.executor.run_topp`, but takes a single Python script as input instead of a TOPP tool name. 
The default location for the Python script files is in `src/python-tools` (in this case the `.py` file extension is optional in the **script_file** argument), however, any other path can be specified as well. Input and output file parameters need to be specified in the **input_output** dictionary. +""" + ) + + st.code( + """ +# e.g. example Python tool which modifies mzML files in place based on experimental design +self.ui.input_python(script_file="example", input_output={"in": in_mzML, "in_experimantal_design": FileManager(["path/to/experimantal-design.tsv"])}) + """ + ) + + st.markdown("**Example for a complete workflow section:**") + + st.code(getsource(Workflow.execution)) + + with st.expander("**Code documentation**", expanded=True): + st.help(CommandExecutor.run_command) + st.help(CommandExecutor.run_multiple_commands) + st.help(CommandExecutor.run_topp) + st.help(CommandExecutor.run_python) + +############################################################################################# +# Windows Executables +############################################################################################# + +if page == pages[4]: + # Define CSS styles + css = """ + +""" + + st.markdown(css, unsafe_allow_html=True) + + st.markdown( + """ +# πŸ’» How to package everything for Windows executables + +This guide explains how to package OpenMS apps into Windows executables using two different methods: +""" + ) + + + def fetch_markdown_content(url): + response = requests.get(url) + if response.status_code == 200: + # Remove the first line from the content + content_lines = response.text.split("\n") + markdown_content = "\n".join(content_lines[1:]) + return markdown_content + else: + return None + + + tabs = ["embeddable Python", "PyInstaller"] + tabs = st.tabs(tabs) + + # window executable with embeddable python + with tabs[0]: + markdown_url = "https://raw.githubusercontent.com/OpenMS/streamlit-template/main/win_exe_with_embed_py.md" + + markdown_content = fetch_markdown_content(markdown_url) + + if markdown_content: + st.markdown(markdown_content, unsafe_allow_html=True) + else: + st.error( + "Failed to fetch Markdown content from the specified URL.", markdown_url + ) + + # window executable with pyinstaller + with tabs[1]: + # URL of the Markdown document + markdown_url = "https://raw.githubusercontent.com/OpenMS/streamlit-template/main/win_exe_with_pyinstaller.md" + + markdown_content = fetch_markdown_content(markdown_url) + + if markdown_content: + st.markdown(markdown_content, unsafe_allow_html=True) + else: + st.error( + "Failed to fetch Markdown content from the specified URL. 
", markdown_url + ) + +############################################################################################# +# Deployment +############################################################################################# + +if page == pages[5]: + url = "https://raw.githubusercontent.com/OpenMS/streamlit-deployment/main/README.md" + + response = requests.get(url) + + if response.status_code == 200: + st.markdown(response.text) # or process the content as needed + else: + st.warning("Failed to get README from streamlit-deployment repository.") \ No newline at end of file diff --git "a/pages/0_\360\237\223\226_Installation.py" "b/pages/0_\360\237\223\226_Installation.py" deleted file mode 100644 index 82070e018..000000000 --- "a/pages/0_\360\237\223\226_Installation.py" +++ /dev/null @@ -1,61 +0,0 @@ -import streamlit as st -from pathlib import Path -from src.common import page_setup - -page_setup() - -if Path("OpenMS-App.zip").exists(): - st.markdown(""" -Download the latest version for **Windows** here clicking the button below. -""") - with open("OpenMS-App.zip", "rb") as file: - st.download_button( - label="Download for Windows", - data=file, - file_name="OpenMS-App.zip", - mime="archive/zip", - type="primary", - ) - -st.markdown(""" -# Installation - -## Windows - -The app is available as pre-packaged Windows executable, including all dependencies. - -The windows executable is built by a GitHub action and can be downloaded [here](https://github.com/OpenMS/streamlit-template/actions/workflows/build-windows-executable-app.yaml). -Select the latest successfull run and download the zip file from the artifacts section, while signed in to GitHub. - -## Python - -Clone the [streamlit-template repository](https://github.com/OpenMS/streamlit-template). It includes files to install dependencies via pip or conda. - -### via pip in an existing Python environment - -To install all required depdencies via pip in an already existing Python environment, run the following command in the terminal: - -`pip install -r requirements.txt` - -### create new environment via conda/mamba - -Create and activate the conda environment: - -`conda env create -f environment.yml` - -`conda activate streamlit-env` - -### run the app - -Run the app via streamlit command in the terminal with or without *local* mode (default is *online* mode). Learn more about *local* and *online* mode in the documentation page πŸ“– **OpenMS Template App**. - -`streamlit run app.py [local]` - -## Docker - -This repository contains two Dockerfiles. - -1. `Dockerfile`: This Dockerfile builds all dependencies for the app including Python packages and the OpenMS TOPP tools. Recommended for more complex workflows where you want to use the OpenMS TOPP tools for instance with the **TOPP Workflow Framework**. -2. `Dockerfile_simple`: This Dockerfile builds only the Python packages. Recommended for simple apps using pyOpenMS only. - -""") \ No newline at end of file diff --git "a/pages/1_\360\237\223\226_User_Guide.py" "b/pages/1_\360\237\223\226_User_Guide.py" deleted file mode 100644 index dc3bd6aed..000000000 --- "a/pages/1_\360\237\223\226_User_Guide.py" +++ /dev/null @@ -1,52 +0,0 @@ -import streamlit as st -from src.common import page_setup - -page_setup() - -st.markdown(""" -# User Guide - -Welcome to the OpenMS Streamlit Web Application! This guide will help you understand how to use our tools effectively. 
- -## Advantages of OpenMS Web Apps - -OpenMS web applications provide a user-friendly interface for accessing the powerful features of OpenMS. Here are a few advantages: -- **Accessibility**: Access powerful OpenMS algorithms and TOPP tools from any device with a web browser. -- **Ease of Use**: Simplified user interface makes it easy for both beginners and experts to perform complex analyses. -- **No Installation Required**: Use the tools without the need to install OpenMS locally, saving time and system resources. - -## Workspaces - -In the OpenMS web application, workspaces are designed to keep your analysis organized: -- **Workspace Specific Parameters and Files**: Each workspace stores parameters and files (uploaded input files and results from workflows). -- **Persistence**: Your workspaces and parameters are saved, so you can return to your analysis anytime and pick up where you left off. - -## Online and Local Mode Differences - -There are a few key differences between operating in online and local modes: -- **File Uploads**: - - *Online Mode*: You can upload only one file at a time. This helps manage server load and optimizes performance. - - *Local Mode*: Multiple file uploads are supported, giving you flexibility when working with large datasets. -- **Workspace Access**: - - In online mode, workspaces are stored temporarily and will be cleared after seven days of inactivity. - - In local mode, workspaces are saved on your local machine, allowing for persistent storage. - -## Downloading Results - -You can download the results of your analyses, including figures and tables, directly from the application: -- **Figures**: Click the camera icon button, appearing while hovering on the top right corner of the figure. Set the desired image format in the settings panel in the side bar. -- **Tables**: Use the download button to save tables in *csv* format, appearing while hovering on the top right corner of the table. - -## Getting Started - -To get started: -1. Select or create a new workspace. -2. Upload your data file. -3. Set the necessary parameters for your analysis. -4. Run the analysis. -5. View and download your results. - -For more detailed information on each step, refer to the specific sections of this guide. -""") - - diff --git "a/pages/2_\360\237\223\226_Build_App.py" "b/pages/2_\360\237\223\226_Build_App.py" deleted file mode 100644 index ffb45c724..000000000 --- "a/pages/2_\360\237\223\226_Build_App.py" +++ /dev/null @@ -1,78 +0,0 @@ -import streamlit as st - -from src.common import page_setup - -page_setup() - -st.markdown(""" -# Build your own app based on this template - -## App layout - -- *Main page* contains explanatory text on how to use the app and a workspace selector. `app.py` -- *Pages* can be navigated via *Sidebar*. Sidebar also contains the OpenMS logo, settings panel and a workspace indicator. The *main page* contains a workspace selector as well. -- See *pages* in the template app for example use cases. The content of this app serves as a documentation. - -## Key concepts - -- **Workspaces** -: Directories where all data is generated and uploaded can be stored as well as a workspace specific parameter file. -- **Run the app locally and online** -: Launching the app with the `local` argument lets the user create/remove workspaces. In the online the user gets a workspace with a specific ID. -- **Parameters** -: Parameters (defaults in `assets/default-params.json`) store changing parameters for each workspace. 
Parameters are loaded via the page_setup function at the start of each page. To track a widget variable via parameters simply give them a key and add a matching entry in the default parameters file. Initialize a widget value from the params dictionary. - -```python -params = page_setup() - -st.number_input(label="x dimension", min_value=1, max_value=20, -value=params["example-y-dimension"], step=1, key="example-y-dimension") - -save_params() -``` - -## Code structure - -- **Pages** must be placed in the `pages` directory. -- It is recommended to use a separate file for defining functions per page in the `src` directory. -- The `src/common.py` file contains a set of useful functions for common use (e.g. rendering a table with download button). - -## Modify the template to build your own app - -1. In `src/common.py`, update the name of your app and the repository name - ```python - APP_NAME = "OpenMS Streamlit App" - REPOSITORY_NAME = "streamlit-template" - ``` -2. In `clean-up-workspaces.py`, update the name of the workspaces directory to `/workspaces-` - ```python - workspaces_directory = Path("/workspaces-streamlit-template") - ``` -3. Update `README.md` accordingly - - -**Dockerfile-related** -1. Choose one of the Dockerfiles depending on your use case: - - `Dockerfile` builds OpenMS including TOPP tools - - `Dockerfile_simple` uses pyOpenMS only -2. Update the Dockerfile: - - with the `GITHUB_USER` owning the Streamlit app repository - - with the `GITHUB_REPO` name of the Streamlit app repository - - if your main page Python file is not called `app.py`, modify the following line - ```dockerfile - RUN echo "mamba run --no-capture-output -n streamlit-env streamlit run app.py" >> /app/entrypoint.sh - ``` -3. Update Python package dependency files: - - `requirements.txt` if using `Dockerfile_simple` - - `environment.yml` if using `Dockerfile` - -## How to build a workflow - -### Simple workflow using pyOpenMS - -Take a look at the example pages `Simple Workflow` or `Workflow with mzML files` for examples (on the *sidebar*). Put Streamlit logic inside the pages and call the functions with workflow logic from from the `src` directory (for our examples `src/simple_workflow.py` and `src/mzmlfileworkflow.py`). - -### Complex workflow using TOPP tools - -This template app features a module in `src/workflow` that allows for complex and long workflows to be built very efficiently. Check out the `TOPP Workflow Framework` page for more information (on the *sidebar*). 
-""") \ No newline at end of file diff --git "a/pages/3_\360\237\223\226_TOPP_Workflow_Framework.py" "b/pages/3_\360\237\223\226_TOPP_Workflow_Framework.py" deleted file mode 100644 index 677f02c75..000000000 --- "a/pages/3_\360\237\223\226_TOPP_Workflow_Framework.py" +++ /dev/null @@ -1,254 +0,0 @@ -import streamlit as st -from src.Workflow import Workflow -from src.workflow.StreamlitUI import StreamlitUI -from src.workflow.FileManager import FileManager -from src.workflow.CommandExecutor import CommandExecutor -from src.common import page_setup -from inspect import getsource - -page_setup() - -wf = Workflow() - -st.title("πŸ“– TOPP Workflow Framework Documentation") - -st.markdown( -""" -## Features - -- streamlined methods for uploading files, setting parameters, and executing workflows -- automatic parameter handling -- quickly build parameter interface for TOPP tools with all parameters from *ini* files -- automatically create a log file for each workflow run with stdout and stderr -- workflow output updates automatically in short intervalls -- user can leave the app and return to the running workflow at any time -- quickly build a workflow with multiple steps channelling files between steps -""" -) - -st.markdown( -""" -## Quickstart - -This repository contains a module in `src/workflow` that provides a framework for building and running analysis workflows. - -The `WorkflowManager` class provides the core workflow logic. It uses the `Logger`, `FileManager`, `ParameterManager`, and `CommandExecutor` classes to setup a complete workflow logic. - -To build your own workflow edit the file `src/TOPPWorkflow.py`. Use any streamlit components such as tabs (as shown in example), columns, or even expanders to organize the helper functions for displaying file upload and parameter widgets. - -> πŸ’‘ Simply set a name for the workflow and overwrite the **`upload`**, **`configure`**, **`execution`** and **`results`** methods in your **`Workflow`** class. - -The file `pages/6_TOPP-Workflow.py` displays the workflow content and can, but does not have to be modified. - -The `Workflow` class contains four important members, which you can use to build your own workflow: - -> **`self.params`:** dictionary of parameters stored in a JSON file in the workflow directory. Parameter handling is done automatically. Default values are defined in input widgets and non-default values are stored in the JSON file. - -> **`self.ui`:** object of type `StreamlitUI` contains helper functions for building the parameter and file upload widgets. - -> **`self.executor`:** object of type `CommandExecutor` can be used to run any command line tool alone or in parallel and includes a convenient method for running TOPP tools. - -> **`self.logger`:** object of type `Logger` to write any output to a log file during workflow execution. - -> **`self.file_manager`:** object of type `FileManager` to handle file types and creation of output directories. -""" -) - -with st.expander("**Complete example for custom Workflow class**", expanded=False): - st.code(getsource(Workflow)) - -st.markdown( -""" -## File Upload - -All input files for the workflow will be stored within the workflow directory in the subdirectory `input-files` within it's own subdirectory for the file type. - -The subdirectory name will be determined by a **key** that is defined in the `self.ui.upload_widget` method. The uploaded files are available by the specific key for parameter input widgets and accessible while building the workflow. 
- -Calling this method will create a complete file upload widget section with the following components: - -- file uploader -- list of currently uploaded files with this key (or a warning if there are none) -- button to delete all files - -Fallback files(s) can be specified, which will be used if the user doesn't upload any files. This can be useful for example for database files where a default is provided. -""") - -st.code(getsource(Workflow.upload)) - -st.info("πŸ’‘ Use the same **key** for parameter widgets, to select which of the uploaded files to use for analysis.") - -with st.expander("**Code documentation:**", expanded=True): - st.help(StreamlitUI.upload_widget) - -st.markdown( - """ -## Parameter Input - -The paramter section is already pre-defined as a form with buttons to **save parameters** and **load defaults** and a toggle to show TOPP tool parameters marked as advanced. - -Generating parameter input widgets is done with the `self.ui.input` method for any parameter and the `self.ui.input_TOPP` method for TOPP tools. - -**1. Choose `self.ui.input_widget` for any paramter not-related to a TOPP tool or `self.ui.select_input_file` for any input file:** - -It takes the obligatory **key** parameter. The key is used to access the parameter value in the workflow parameters dictionary `self.params`. Default values do not need to be specified in a separate file. Instead they are determined from the widgets default value automatically. Widget types can be specified or automatically determined from **default** and **options** parameters. It's suggested to add a **help** text and other parameters for numerical input. - -Make sure to match the **key** of the upload widget when calling `self.ui.input_TOPP`. - -**2. Choose `self.ui.input_TOPP` to automatically generate complete input sections for a TOPP tool:** - -It takes the obligatory **topp_tool_name** parameter and generates input widgets for each parameter present in the **ini** file (automatically created) except for input and output file parameters. For all input file parameters a widget needs to be created with `self.ui.select_input_file` with an appropriate **key**. For TOPP tool parameters only non-default values are stored. - -**3. Choose `self.ui.input_python` to automatically generate complete input sections for a custom Python tool:** - -Takes the obligatory **script_file** argument. The default location for the Python script files is in `src/python-tools` (in this case the `.py` file extension is optional in the **script_file** argument), however, any other path can be specified as well. Parameters need to be specified in the Python script in the **DEFAULTS** variable with the mandatory **key** and **value** parameters. -""") - -with st.expander("Options to use as dictionary keys for parameter definitions (see `src/python-tools/example.py` for an example)"): - st.markdown(""" -**Mandatory** keys for each parameter -- *key:* a unique identifier -- *value:* the default value - -**Optional** keys for each parameter -- *name:* the name of the parameter -- *hide:* don't show the parameter in the parameter section (e.g. 
for **input/output files**) -- *options:* a list of valid options for the parameter -- *min:* the minimum value for the parameter (int and float) -- *max:* the maximum value for the parameter (int and float) -- *step_size:* the step size for the parameter (int and float) -- *help:* a description of the parameter -- *widget_type:* the type of widget to use for the parameter (default: auto) -- *advanced:* whether or not the parameter is advanced (default: False) -""") - -st.code( -getsource(Workflow.configure) -) -st.info("πŸ’‘ Access parameter widget values by their **key** in the `self.params` object, e.g. `self.params['mzML-files']` will give all selected mzML files.") - -with st.expander("**Code documentation**", expanded=True): - st.help(StreamlitUI.input_widget) - st.help(StreamlitUI.select_input_file) - st.help(StreamlitUI.input_TOPP) - st.help(StreamlitUI.input_python) -st.markdown( - """ -## Building the Workflow - -Building the workflow involves **calling all (TOPP) tools** using **`self.executor`** with **input and output files** based on the **`FileManager`** class. For TOPP tools non-input-output parameters are handled automatically. Parameters for other processes and workflow logic can be accessed via widget keys (set in the parameter section) in the **`self.params`** dictionary. - -### FileManager - -The `FileManager` class serves as an interface for unified input and output files with useful functionality specific to building workflows, such as **setting a (new) file type** and **subdirectory in the workflows result directory**. - -Use the **`get_files`** method to get a list of all file paths as strings. - -Optionally set the following parameters modify the files: - -- **set_file_type** (str): set new file types and result subdirectory. -- **set_results_dir** (str): set a new subdirectory in the workflows result directory. -- **collect** (bool): collect all files into a single list. Will return a list with a single entry, which is a list of all files. Useful to pass to tools which can handle multiple input files at once. -""") - -st.code( - """ -# Get all file paths as strings from self.param entry. -mzML_files = self.file_manager.get_files(self.params["mzML-files]) -# mzML_files = ['../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Control.mzML', '../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Treatment.mzML'] - -# Creating output files for a TOPP tool, setting a new file type and result subdirectory name. -feature_detection_out = self.file_manager.get_files(mzML_files, set_file_type="featureXML", set_results_dir="feature-detection") -# feature_detection_out = ['../workspaces-streamlit-template/default/topp-workflow/results/feature-detection/Control.featureXML', '../workspaces-streamlit-template/default/topp-workflow/results/feature-detection/Treatment.featureXML'] - -# Setting a name for the output directory automatically (useful if you never plan to access these files in the results section). -feature_detection_out = self.file_manager.get_files(mzML_files, set_file_type="featureXML", set_results_dir="auto") -# feature_detection_out = ['../workspaces-streamlit-template/default/topp-workflow/results/6DUd/Control.featureXML', '../workspaces-streamlit-template/default/topp-workflow/results/6DUd/Treatment.featureXML'] - -# Combining all mzML files to be passed to a TOPP tool in a single run. Using "collected" files as argument for self.file_manager.get_files will "un-collect" them. 
-mzML_files = self.file_manager.get_files(mzML_files, collect=True) -# mzML_files = [['../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Control.mzML', '../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Treatment.mzML']] - """ -) - -with st.expander("**Code documentation**", expanded=True): - st.help(FileManager.get_files) - -st.markdown( - """ -### Running commands - -It is possible to execute any command line command using the **`self.executor`** object, either a single command or a list of commands in parallel. Furthermore a method to run TOPP tools is included. - -**1. Single command** - -The `self.executor.run_command` method takes a single command as input and optionally logs stdout and stderr to the workflow log (default True). -""") - -st.code(""" -self.executor.run_command(["command", "arg1", "arg2", ...]) -""") - -st.markdown( - """ -**2. Run multiple commands in parallel** - -The `self.executor.run_multiple_commands` method takes a list of commands as inputs. - -**3. Run TOPP tools** - -The `self.executor.run_topp` method takes a TOPP tool name as input and a dictionary of input and output files as input. The **keys** need to match the actual input and output parameter names of the TOPP tool. The **values** should be of type `FileManager`. All other **non-default parameters (from input widgets)** will be passed to the TOPP tool automatically. - -Depending on the number of input files, the TOPP tool will be run either in parallel or in a single run (using **`FileManager.collect`**). -""") - -st.info("""πŸ’‘ **Input and output file order** - -In many tools, a single input file is processed to produce a single output file. -When dealing with lists of input or output files, the convention is that -files are paired based on their order. For instance, the n-th input file is -assumed to correspond to the n-th output file, maintaining a structured -relationship between input and output data. -""") -st.code(""" -# e.g. FeatureFinderMetabo takes single input files -in_files = self.file_manager.get_files(["sample1.mzML", "sample2.mzML"]) -out_files = self.file_manager.get_files(in_files, set_file_type="featureXML", set_results_dir="feature-detection") - -# Run FeatureFinderMetabo tool with input and output files in parallel for each pair of input/output files. -self.executor.run_topp("FeatureFinderMetabo", input_output={"in": in_files, "out": out_files}) -# FeaturFinderMetabo -in sample1.mzML -out workspace-dir/results/feature-detection/sample1.featureXML -# FeaturFinderMetabo -in sample2.mzML -out workspace-dir/results/feature-detection/sample2.featureXML - -# Run SiriusExport tool with mutliple input and output files. -out = self.file_manager.get_files("sirius.ms", set_results_dir="sirius-export") -self.executor.run_topp("SiriusExport", {"in": self.file_manager.get_files(in_files, collect=True), - "in_featureinfo": self.file_manager.get_files(out_files, collect=True), - "out": out_se}) -# SiriusExport -in sample1.mzML sample2.mzML -in_featureinfo sample1.featureXML sample2.featureXML -out sirius.ms - """) - -st.markdown(""" -**4. Run custom Python scripts** - -Sometimes it is useful to run custom Python scripts, for example for extra functionality which is not included in a TOPP tool. - -`self.executor.run_python` works similar to `self.executor.run_topp`, but takes a single Python script as input instead of a TOPP tool name. 
The default location for the Python script files is in `src/python-tools` (in this case the `.py` file extension is optional in the **script_file** argument), however, any other path can be specified as well. Input and output file parameters need to be specified in the **input_output** dictionary. -""") - -st.code(""" -# e.g. example Python tool which modifies mzML files in place based on experimental design -self.ui.input_python(script_file="example", input_output={"in": in_mzML, "in_experimantal_design": FileManager(["path/to/experimantal-design.tsv"])}) - """) - -st.markdown("**Example for a complete workflow section:**") - -st.code( -getsource(Workflow.execution) -) - -with st.expander("**Code documentation**", expanded=True): - st.help(CommandExecutor.run_command) - st.help(CommandExecutor.run_multiple_commands) - st.help(CommandExecutor.run_topp) - st.help(CommandExecutor.run_python) \ No newline at end of file diff --git "a/pages/4_\360\237\223\226_Windows_executable.py" "b/pages/4_\360\237\223\226_Windows_executable.py" deleted file mode 100644 index 14322530a..000000000 --- "a/pages/4_\360\237\223\226_Windows_executable.py" +++ /dev/null @@ -1,78 +0,0 @@ -import streamlit as st -import requests - -import streamlit as st - -# Define CSS styles -css = ''' - -''' - - -st.markdown(css, unsafe_allow_html=True) - -st.markdown(""" -# πŸ’» How to package everything for Windows executables - -This guide explains how to package OpenMS apps into Windows executables using two different methods: -""") - -def fetch_markdown_content(url): - response = requests.get(url) - if response.status_code == 200: - # Remove the first line from the content - content_lines = response.text.split("\n") - markdown_content = "\n".join(content_lines[1:]) - return markdown_content - else: - return None - -tabs = ["embeddable Python", "PyInstaller"] -tabs = st.tabs(tabs) - -# window executable with embeddable python -with tabs[0]: - markdown_url = "https://raw.githubusercontent.com/OpenMS/streamlit-template/main/win_exe_with_embed_py.md" - - markdown_content = fetch_markdown_content(markdown_url) - - if markdown_content: - st.markdown(markdown_content, unsafe_allow_html=True) - else: - st.error("Failed to fetch Markdown content from the specified URL.", markdown_url) - -# window executable with pyinstaller -with tabs[1]: - # URL of the Markdown document - markdown_url = "https://raw.githubusercontent.com/OpenMS/streamlit-template/main/win_exe_with_pyinstaller.md" - - markdown_content = fetch_markdown_content(markdown_url) - - if markdown_content: - st.markdown(markdown_content, unsafe_allow_html=True) - else: - st.error("Failed to fetch Markdown content from the specified URL. 
", markdown_url) - - diff --git "a/pages/5_\360\237\223\226_Deployment.py" "b/pages/5_\360\237\223\226_Deployment.py" deleted file mode 100644 index 1a857be7c..000000000 --- "a/pages/5_\360\237\223\226_Deployment.py" +++ /dev/null @@ -1,15 +0,0 @@ -import streamlit as st -import requests - -from src.common import page_setup - -page_setup() - -url = "https://raw.githubusercontent.com/OpenMS/streamlit-deployment/main/README.md" - -response = requests.get(url) - -if response.status_code == 200: - st.markdown(response.text) # or process the content as needed -else: - st.warning("Failed to get README from streamlit-deployment repository.") \ No newline at end of file From 3f0a0892e3811c2b9b43e950adb739b1876f2900 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Tue, 9 Jul 2024 15:14:25 +0200 Subject: [PATCH 05/24] rework multipage app - using the new st.navigation - creates sections in sidebar - icons don't need to be in page name --- app.py | 85 +++++-------------- .../{0_Documentation.py => documentation.py} | 2 +- .../file_upload.py | 0 pages/quickstart.py | 65 ++++++++++++++ .../raw_data_viewer.py | 0 ..._mzML_files.py => run_example_workflow.py} | 0 ...13_Run_subprocess.py => run_subprocess.py} | 0 ..._Simple_Workflow.py => simple_workflow.py} | 0 .../{14_TOPP-Workflow.py => topp_workflow.py} | 0 9 files changed, 88 insertions(+), 64 deletions(-) rename pages/{0_Documentation.py => documentation.py} (99%) rename "pages/10_\360\237\223\201_File_Upload.py" => pages/file_upload.py (100%) create mode 100644 pages/quickstart.py rename "pages/11_\360\237\221\200_View_Raw_Data.py" => pages/raw_data_viewer.py (100%) rename pages/{15_Workflow_with_mzML_files.py => run_example_workflow.py} (100%) rename pages/{13_Run_subprocess.py => run_subprocess.py} (100%) rename pages/{12_Simple_Workflow.py => simple_workflow.py} (100%) rename pages/{14_TOPP-Workflow.py => topp_workflow.py} (100%) diff --git a/app.py b/app.py index ace5767d3..cbdf70ab3 100644 --- a/app.py +++ b/app.py @@ -1,65 +1,24 @@ -""" -Main page for the OpenMS Template App. - -This module sets up and displays the Streamlit app for the OpenMS Template App. -It includes: -- Setting the app title. -- Displaying a description. -- Providing a download button for the Windows version of the app. - -Usage: -Run this script to launch the OpenMS Template App. - -Note: -- If run in local mode, the CAPTCHA control is not applied. -- If not in local mode, CAPTCHA control is applied to verify the user. - -Returns: - None -""" - -from pathlib import Path import streamlit as st +from pathlib import Path -from src.common import page_setup - -page_setup(page="main") - -st.title("OpenMS Streamlit Template App") -st.markdown(""" -This repository contains a template app for OpenMS workflows in a web application using the **streamlit** framework. - -It serves as a foundation for apps ranging from simple workflows with **pyOpenMS** to complex workflows utilizing **OpenMS TOPP tools** with parallel execution. - -It includes solutions for handling user data and parameters in workspaces as well as deployment with docker-compose. 
-""") -st.subheader("Features") -st.markdown(""" -- Workspaces for user data with unique shareable IDs -- Persistent parameters and input files within a workspace -- Captcha control -- Packaged executables for Windows -- framework for workflows with OpenMS TOPP tools -- Deployment [with docker-compose](https://github.com/OpenMS/streamlit-deployment) -""") -st.subheader("Quick Start") -if Path("OpenMS-App.zip").exists(): - st.markdown(""" -Download the latest version for Windows here by clicking the button below. -""") - with open("OpenMS-App.zip", "rb") as file: - st.download_button( - label="Download for Windows", - data=file, - file_name="OpenMS-App.zip", - mime="archive/zip", - type="primary", - ) - st.markdown(""" -Extract the zip file and run the executable (.exe) file to launch the app. Since every dependency is compressed and packacked the app will take a while to launch (up to one minute). -""") -st.markdown(""" -Check out the documentation for **users** and **developers** is included as pages indicated by the πŸ“– icon - -Try the example pages **πŸ“ mzML file upload**, **πŸ‘€ visualization** and **example workflows**. -""") \ No newline at end of file +pages = { + "OpenMS Web App" : [ + st.Page(Path("pages", "quickstart.py"), title="Quickstart", icon="πŸ‘‹"), + st.Page(Path("pages", "documentation.py"), title="Documentation", icon="πŸ“–"), + ], + "TOPP Workflow Framework": [ + st.Page(Path("pages", "topp_workflow.py"), title="TOPP Workflow", icon="πŸš€"), + ], + "Example MS Workflow" : [ + st.Page(Path("pages", "file_upload.py"), title="File Upload", icon="πŸ“‚"), + st.Page(Path("pages", "raw_data_viewer.py"), title="View MS data", icon="πŸ‘€"), + st.Page(Path("pages", "run_example_workflow.py"), title="Run Workflow", icon="βš™οΈ"), + ], + "Others Topics": [ + st.Page(Path("pages", "simple_workflow.py"), title="Simple Workflow", icon="βš™οΈ"), + st.Page(Path("pages", "run_subprocess.py"), title="Run Subprocess", icon="πŸ–₯️"), + ] +} + +pg = st.navigation(pages) +pg.run() \ No newline at end of file diff --git a/pages/0_Documentation.py b/pages/documentation.py similarity index 99% rename from pages/0_Documentation.py rename to pages/documentation.py index 436e45929..e556e571e 100644 --- a/pages/0_Documentation.py +++ b/pages/documentation.py @@ -11,7 +11,7 @@ page_setup() -st.title("πŸ“– Documentation") +st.title("Documentation") cols = st.columns(2) diff --git "a/pages/10_\360\237\223\201_File_Upload.py" b/pages/file_upload.py similarity index 100% rename from "pages/10_\360\237\223\201_File_Upload.py" rename to pages/file_upload.py diff --git a/pages/quickstart.py b/pages/quickstart.py new file mode 100644 index 000000000..ace5767d3 --- /dev/null +++ b/pages/quickstart.py @@ -0,0 +1,65 @@ +""" +Main page for the OpenMS Template App. + +This module sets up and displays the Streamlit app for the OpenMS Template App. +It includes: +- Setting the app title. +- Displaying a description. +- Providing a download button for the Windows version of the app. + +Usage: +Run this script to launch the OpenMS Template App. + +Note: +- If run in local mode, the CAPTCHA control is not applied. +- If not in local mode, CAPTCHA control is applied to verify the user. + +Returns: + None +""" + +from pathlib import Path +import streamlit as st + +from src.common import page_setup + +page_setup(page="main") + +st.title("OpenMS Streamlit Template App") +st.markdown(""" +This repository contains a template app for OpenMS workflows in a web application using the **streamlit** framework. 
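+
+To illustrate how a page plugs into the `st.navigation` layout shown in `app.py` above, here is a minimal sketch of a hypothetical page module (the file name and widgets are illustrative, not part of the template):
+
+```python
+# pages/my_page.py - hypothetical example page
+import streamlit as st
+from src.common import page_setup
+
+# page_setup() loads the workspace-specific parameters and renders the shared sidebar
+params = page_setup()
+
+st.title("My Page")
+st.write("Current workspace:", st.session_state["workspace"])
+```
+
+Such a page would be registered in `app.py` by adding `st.Page(Path("pages", "my_page.py"), title="My Page", icon="πŸ“„")` to one of the sections in the `pages` dictionary.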
+ +It serves as a foundation for apps ranging from simple workflows with **pyOpenMS** to complex workflows utilizing **OpenMS TOPP tools** with parallel execution. + +It includes solutions for handling user data and parameters in workspaces as well as deployment with docker-compose. +""") +st.subheader("Features") +st.markdown(""" +- Workspaces for user data with unique shareable IDs +- Persistent parameters and input files within a workspace +- Captcha control +- Packaged executables for Windows +- framework for workflows with OpenMS TOPP tools +- Deployment [with docker-compose](https://github.com/OpenMS/streamlit-deployment) +""") +st.subheader("Quick Start") +if Path("OpenMS-App.zip").exists(): + st.markdown(""" +Download the latest version for Windows here by clicking the button below. +""") + with open("OpenMS-App.zip", "rb") as file: + st.download_button( + label="Download for Windows", + data=file, + file_name="OpenMS-App.zip", + mime="archive/zip", + type="primary", + ) + st.markdown(""" +Extract the zip file and run the executable (.exe) file to launch the app. Since every dependency is compressed and packacked the app will take a while to launch (up to one minute). +""") +st.markdown(""" +Check out the documentation for **users** and **developers** is included as pages indicated by the πŸ“– icon + +Try the example pages **πŸ“ mzML file upload**, **πŸ‘€ visualization** and **example workflows**. +""") \ No newline at end of file diff --git "a/pages/11_\360\237\221\200_View_Raw_Data.py" b/pages/raw_data_viewer.py similarity index 100% rename from "pages/11_\360\237\221\200_View_Raw_Data.py" rename to pages/raw_data_viewer.py diff --git a/pages/15_Workflow_with_mzML_files.py b/pages/run_example_workflow.py similarity index 100% rename from pages/15_Workflow_with_mzML_files.py rename to pages/run_example_workflow.py diff --git a/pages/13_Run_subprocess.py b/pages/run_subprocess.py similarity index 100% rename from pages/13_Run_subprocess.py rename to pages/run_subprocess.py diff --git a/pages/12_Simple_Workflow.py b/pages/simple_workflow.py similarity index 100% rename from pages/12_Simple_Workflow.py rename to pages/simple_workflow.py diff --git a/pages/14_TOPP-Workflow.py b/pages/topp_workflow.py similarity index 100% rename from pages/14_TOPP-Workflow.py rename to pages/topp_workflow.py From f54f40490c1e8c4127a6b7df691df7bbc10d6e41 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Tue, 9 Jul 2024 15:29:29 +0200 Subject: [PATCH 06/24] remove name-main in topp workflow to work with st.navigation --- pages/topp_workflow.py | 30 ++++++++++++++---------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/pages/topp_workflow.py b/pages/topp_workflow.py index 0f5f53c6a..ab4bc4750 100644 --- a/pages/topp_workflow.py +++ b/pages/topp_workflow.py @@ -2,25 +2,23 @@ from src.common import page_setup from src.Workflow import Workflow -# The rest of the page can, but does not have to be changed -if __name__ == "__main__": - - params = page_setup() +# # The rest of the page can, but does not have to be changed +params = page_setup() - wf = Workflow() +wf = Workflow() - st.title(wf.name) +st.title(wf.name) - t = st.tabs(["πŸ“ **File Upload**", "βš™οΈ **Configure**", "πŸš€ **Run**", "πŸ“Š **Results**"]) - with t[0]: - wf.show_file_upload_section() +t = st.tabs(["πŸ“ **File Upload**", "βš™οΈ **Configure**", "πŸš€ **Run**", "πŸ“Š **Results**"]) +with t[0]: + wf.show_file_upload_section() - with t[1]: - wf.show_parameter_section() +with t[1]: + wf.show_parameter_section() - with 
t[2]: - wf.show_execution_section() - - with t[3]: - wf.show_results_section() +with t[2]: + wf.show_execution_section() + +with t[3]: + wf.show_results_section() From 0774086b56564f1619ed1acf4a7408046786991c Mon Sep 17 00:00:00 2001 From: axelwalter Date: Tue, 9 Jul 2024 15:30:13 +0200 Subject: [PATCH 07/24] update sidebar - icon on top (can be changed to 24*240px banner later) - workspace selector on every page in collapsed expander --- src/common.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/common.py b/src/common.py index edb26b247..7bb4ba6ea 100644 --- a/src/common.py +++ b/src/common.py @@ -101,6 +101,8 @@ def page_setup(page: str = "") -> dict[str, Any]: menu_items=None, ) + st.logo("assets/pyopenms_transparent_background.png") + # Determine the workspace for the current session if "workspace" not in st.session_state: # Clear any previous caches @@ -159,8 +161,7 @@ def render_sidebar(page: str = "") -> None: params = load_params() with st.sidebar: # The main page has workspace switcher - if page == "main": - st.markdown("πŸ–₯️ **Workspaces**") + with st.expander("πŸ–₯️ **Workspaces**"): # Define workspaces directory outside of repository workspaces_dir = Path("..", "workspaces-" + REPOSITORY_NAME) # Online: show current workspace name in info text and option to change to other existing workspace @@ -230,9 +231,6 @@ def change_workspace(): img_formats.index(params["image-format"]), key="image-format", ) - if page != "main": - st.info(f"**{Path(st.session_state['workspace']).stem}**") - st.image("assets/OpenMS.png", "powered by") return params From b8d28284fd193a0f7cbc501a0633376773eb8619 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Wed, 10 Jul 2024 09:25:09 +0200 Subject: [PATCH 08/24] update TOPP workflow example - simple workflow with feature detection and linking - export consensus map to dataframe with python script - added interactive result section --- src/Workflow.py | 66 ++++++++++++------- src/python-tools/.gitignore | 2 +- .../export_consensus_feature_df.py | 46 +++++++++++++ 3 files changed, 89 insertions(+), 25 deletions(-) create mode 100644 src/python-tools/export_consensus_feature_df.py diff --git a/src/Workflow.py b/src/Workflow.py index a403eb572..8c8396766 100644 --- a/src/Workflow.py +++ b/src/Workflow.py @@ -1,6 +1,12 @@ import streamlit as st from .workflow.WorkflowManager import WorkflowManager +# for result section: +from pathlib import Path +import pandas as pd +import plotly.express as px +from .common import show_fig + class Workflow(WorkflowManager): # Setup pages for upload, parameter, execution and results. # For layout use any streamlit components such as tabs (as shown in example), columns, or even expanders. @@ -23,20 +29,17 @@ def configure(self) -> None: # Create tabs for different analysis steps. t = st.tabs( - ["**Feature Detection**", "**Adduct Detection**", "**SIRIUS Export**", "**Python Custom Tool**"] + ["**Feature Detection**", "**Feature Linking**", "**Python Custom Tool**"] ) with t[0]: # Parameters for FeatureFinderMetabo TOPP tool. self.ui.input_TOPP("FeatureFinderMetabo", custom_defaults={"algorithm:common:noise_threshold_int": 1000.0}) with t[1]: - # A single checkbox widget for workflow logic. - self.ui.input_widget("run-adduct-detection", False, "Adduct Detection") # Paramters for MetaboliteAdductDecharger TOPP tool. 
- self.ui.input_TOPP("MetaboliteAdductDecharger") + self.ui.input_TOPP("FeatureLinkerUnlabeledKD") with t[2]: - # Paramters for SiriusExport TOPP tool - self.ui.input_TOPP("SiriusExport") - with t[3]: + # A single checkbox widget for workflow logic. + self.ui.input_widget("run-python-script", False, "Run custom Python script") # Generate input widgets for a custom Python tool, located at src/python-tools. # Parameters are specified within the file in the DEFAULTS dictionary. self.ui.input_python("example") @@ -57,27 +60,42 @@ def execution(self) -> None: out_ffm = self.file_manager.get_files(in_mzML, "featureXML", "feature-detection") # Run FeatureFinderMetabo tool with input and output files. + self.logger.log("Detecting features...") self.executor.run_topp( "FeatureFinderMetabo", input_output={"in": in_mzML, "out": out_ffm} ) - # Check if adduct detection should be run. - if self.params["run-adduct-detection"]: - - # Run MetaboliteAdductDecharger for adduct detection, with disabled logs. - # Without a new file list for output, the input files will be overwritten in this case. - self.executor.run_topp( - "MetaboliteAdductDecharger", {"in": out_ffm, "out_fm": out_ffm} - ) - - # Example for a custom Python tool, which is located in src/python-tools. - self.executor.run_python("example", {"in": in_mzML}) + # Prepare input and output files for feature linking + in_fl = self.file_manager.get_files(out_ffm, collect=True) + out_fl = self.file_manager.get_files("feature_matrix.consensusXML", set_results_dir="feature-linking") - # Prepare output file for SiriusExport. - out_se = self.file_manager.get_files("sirius.ms", set_results_dir="sirius-export") - self.executor.run_topp("SiriusExport", {"in": self.file_manager.get_files(in_mzML, collect=True), - "in_featureinfo": self.file_manager.get_files(out_ffm, collect=True), - "out": out_se}) + # Run FeatureLinkerUnlabaeledKD with all feature maps passed at once + self.logger.log("Linking features...") + self.executor.run_topp("FeatureLinkerUnlabeledKD", input_output={"in": in_fl, "out": out_fl}) + self.logger.log("Exporting consensus features to pandas DataFrame...") + self.executor.run_python("export_consensus_feature_df", input_output={"in": out_fl[0]}) + # Check if adduct detection should be run. + if self.params["run-python-script"]: + # Example for a custom Python tool, which is located in src/python-tools. + self.executor.run_python("example", {"in": in_mzML}) def results(self) -> None: - st.warning("Not implemented yet.") \ No newline at end of file + @st.experimental_fragment + def show_consensus_features(): + df = pd.read_csv(file, sep="\t", index_col=0) + st.metric("number of consensus features", df.shape[0]) + c1, c2 = st.columns(2) + rows = c1.dataframe(df, selection_mode="multi-row", on_select="rerun")["selection"]["rows"] + if rows: + df = df.iloc[rows, 4:] + fig = px.bar(df, barmode="group", labels={"value": "intensity"}) + with c2: + show_fig(fig, "consensus-feature-intensities") + else: + st.info("πŸ’‘ Select one ore more rows in the table to show a barplot with intensities.") + + file = Path(self.workflow_dir, "results", "feature-linking", "feature_matrix.tsv") + if file.exists(): + show_consensus_features() + else: + st.warning("No consensus feature file found. 
Please run workflow first.") \ No newline at end of file diff --git a/src/python-tools/.gitignore b/src/python-tools/.gitignore index ed8ebf583..763624ebe 100644 --- a/src/python-tools/.gitignore +++ b/src/python-tools/.gitignore @@ -1 +1 @@ -__pycache__ \ No newline at end of file +__pycache__/* \ No newline at end of file diff --git a/src/python-tools/export_consensus_feature_df.py b/src/python-tools/export_consensus_feature_df.py new file mode 100644 index 000000000..03be10a85 --- /dev/null +++ b/src/python-tools/export_consensus_feature_df.py @@ -0,0 +1,46 @@ +import json +import sys +from pyopenms import ConsensusXMLFile, ConsensusMap +from pathlib import Path + +############################ +# default paramter values # +########################### +# +# Mandatory keys for each parameter +# key: a unique identifier +# value: the default value +# +# Optional keys for each parameter +# name: the name of the parameter +# hide: don't show the parameter in the parameter section (e.g. for input/output files) +# options: a list of valid options for the parameter +# min: the minimum value for the parameter (int and float) +# max: the maximum value for the parameter (int and float) +# step_size: the step size for the parameter (int and float) +# help: a description of the parameter +# widget_type: the type of widget to use for the parameter (default: auto) +# advanced: whether or not the parameter is advanced (default: False) + +DEFAULTS = [ + {"key": "in", "value": "", "help": "Input consensusXML file.", "hide": True}, +] + +def get_params(): + if len(sys.argv) > 1: + with open(sys.argv[1], "r") as f: + return json.load(f) + else: + return {} + +if __name__ == "__main__": + params = get_params() + # Add code here: + cm = ConsensusMap() + ConsensusXMLFile().load(params["in"], cm) + df = cm.get_df() + df = df.rename(columns={col: Path(col).name for col in df.columns}) + df = df.reset_index() + df = df.drop(columns=["id", "sequence"]) + df.insert(0, "metabolite", df.apply(lambda x: f"{round(x['mz'], 4)}@{round(x['RT'], 2)}", axis=1)) + df.to_csv(Path(params["in"]).with_suffix(".tsv"), sep="\t", index=False) \ No newline at end of file From a886fecd4c61626890b025ddd109fb69852317da Mon Sep 17 00:00:00 2001 From: axelwalter Date: Wed, 10 Jul 2024 09:28:43 +0200 Subject: [PATCH 09/24] TOPP workflow start-stop button alignement --- src/workflow/StreamlitUI.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/workflow/StreamlitUI.py b/src/workflow/StreamlitUI.py index 7b64b2bb6..aab4862f8 100644 --- a/src/workflow/StreamlitUI.py +++ b/src/workflow/StreamlitUI.py @@ -729,12 +729,11 @@ def execution_section(self, start_workflow_function) -> None: c1, c2 = st.columns(2) # Select log level, this can be changed at run time or later without re-running the workflow log_level = c1.selectbox("log details", ["minimal", "commands and run times", "all"], key="log_level") - c2.markdown("##") if self.executor.pid_dir.exists(): - if c2.button("Stop Workflow", type="primary", use_container_width=True): + if c1.button("Stop Workflow", type="primary", use_container_width=True): self.executor.stop() st.rerun() - elif st.button("Start Workflow", type="primary", use_container_width=True): + elif c1.button("Start Workflow", type="primary", use_container_width=True): start_workflow_function() st.rerun() log_path = Path(self.workflow_dir, "logs", log_level.replace(" ", "-") + ".log") From 1034a861fa1be0ab5edfb3fc40ad1c666513a564 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Wed, 10 Jul 2024 
09:54:28 +0200 Subject: [PATCH 10/24] remove unused file --- src/ini2dec.py | 60 -------------------------------------------------- 1 file changed, 60 deletions(-) delete mode 100644 src/ini2dec.py diff --git a/src/ini2dec.py b/src/ini2dec.py deleted file mode 100644 index 57b1661fb..000000000 --- a/src/ini2dec.py +++ /dev/null @@ -1,60 +0,0 @@ -# Take parameters values from tool config file (.ini) -# Define the sections you want to extract -# sections = ["missed_cleavages"]#let suppose we extract tool parameter: missed cleavages - -# path of .ini file (# placed executable .ini file in assets) -# config_path = os.path.join(os.getcwd(), 'assets', 'exec.ini') - -# take dictionary of parameters -# exec_config=ini2dict(config_path, sections) - -# (will give every section as 1 entry: -# entry = { -# "name": node_name, -# "default": node_default, -# "description": node_desc, -# "restrictions": restrictions_list -# }) - -# take all variables settings from config dictionary -# by create form take parameter values -# for example missed_cleavages -# Missed_cleavages = str(st.number_input("Missed_cleavages",value=int(exec_config['missed_cleavages']['default']), help=exec_config['missed_cleavages']['description'] + " default: "+ exec_config['missed_cleavages']['default'])) - -import xml.etree.ElementTree as ET - - -def ini2dict(path: str, sections: list): - """Converts a OpenMS ini file to dictionary.""" - # Parse the XML configuration - tree = ET.parse(path) - root = tree.getroot() - - # Initialize an empty dictionary to store the extracted information - config_dict = {} - - # Iterate through sections and store information in the dictionary - for section_name in sections: - for node in root.findall( - f".//ITEMLIST[@name='{section_name}']" - ) or root.findall(f".//ITEM[@name='{section_name}']"): - # can adapt depends on tool - node_name = str(node.get("name")) - node_default = str(node.get("value")) - node_desc = str(node.get("description")) - node_rest = str(node.get("restrictions")) - - # generate list - restrictions_list = node_rest.split(",") if node_rest else [] - - entry = { - "name": node_name, - "default": node_default, - "description": node_desc, - "restrictions": restrictions_list, - } - - # Store the entry in the section dictionary - config_dict[section_name] = entry - - return config_dict From d43264bd7f78bc8c44936084dfb07384bc6ce440 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Wed, 10 Jul 2024 12:29:31 +0200 Subject: [PATCH 11/24] quickstart page --- app.py | 2 +- pages/.gitignore | 1 + pages/quickstart.py | 125 ++++++++++++++++++++++++++++++++++++-------- 3 files changed, 106 insertions(+), 22 deletions(-) create mode 100644 pages/.gitignore diff --git a/app.py b/app.py index cbdf70ab3..1dd93f3e6 100644 --- a/app.py +++ b/app.py @@ -9,7 +9,7 @@ "TOPP Workflow Framework": [ st.Page(Path("pages", "topp_workflow.py"), title="TOPP Workflow", icon="πŸš€"), ], - "Example MS Workflow" : [ + "pyOpenMS Workflow" : [ st.Page(Path("pages", "file_upload.py"), title="File Upload", icon="πŸ“‚"), st.Page(Path("pages", "raw_data_viewer.py"), title="View MS data", icon="πŸ‘€"), st.Page(Path("pages", "run_example_workflow.py"), title="Run Workflow", icon="βš™οΈ"), diff --git a/pages/.gitignore b/pages/.gitignore new file mode 100644 index 000000000..763624ebe --- /dev/null +++ b/pages/.gitignore @@ -0,0 +1 @@ +__pycache__/* \ No newline at end of file diff --git a/pages/quickstart.py b/pages/quickstart.py index ace5767d3..5f9f6a2e4 100644 --- a/pages/quickstart.py +++ b/pages/quickstart.py @@ -25,28 
+25,29 @@ page_setup(page="main") -st.title("OpenMS Streamlit Template App") -st.markdown(""" -This repository contains a template app for OpenMS workflows in a web application using the **streamlit** framework. +st.title("OpenMS Web App Template") +c1, c2 = st.columns(2) +c1.info( + """ +**πŸ’‘ Template app for OpenMS workflows in a web application using the **streamlit** framework.** -It serves as a foundation for apps ranging from simple workflows with **pyOpenMS** to complex workflows utilizing **OpenMS TOPP tools** with parallel execution. - -It includes solutions for handling user data and parameters in workspaces as well as deployment with docker-compose. -""") -st.subheader("Features") -st.markdown(""" +- Simple workflows with **pyOpenMS** +- Complex workflows utilizing **OpenMS TOPP tools** with parallel execution. - Workspaces for user data with unique shareable IDs - Persistent parameters and input files within a workspace - Captcha control - Packaged executables for Windows -- framework for workflows with OpenMS TOPP tools -- Deployment [with docker-compose](https://github.com/OpenMS/streamlit-deployment) -""") -st.subheader("Quick Start") +- Deploy multiple apps easily with [docker-compose](https://github.com/OpenMS/streamlit-deployment) +""" +) +c2.image("assets/pyopenms_transparent_background.png", width=300) +st.markdown("## πŸ‘‹ Quick Start") if Path("OpenMS-App.zip").exists(): - st.markdown(""" + st.subsubheader( + """ Download the latest version for Windows here by clicking the button below. -""") +""" + ) with open("OpenMS-App.zip", "rb") as file: st.download_button( label="Download for Windows", @@ -55,11 +56,93 @@ mime="archive/zip", type="primary", ) - st.markdown(""" + st.markdown( + """ Extract the zip file and run the executable (.exe) file to launch the app. Since every dependency is compressed and packacked the app will take a while to launch (up to one minute). -""") -st.markdown(""" -Check out the documentation for **users** and **developers** is included as pages indicated by the πŸ“– icon +""" + ) + +st.markdown("### πŸ“– Documentation") +st.markdown( + f""" +This template app includes documentation for **users** including **installation** and introduction to template specific concepts such as **workspaces** and developers with detailed instructions on **how to create and deploy your own app** based on this template. +""" +) +st.page_link( + "pages/documentation.py", + label="Read documentation here, select chapter in the content menu.", + icon="➑️", +) +st.markdown("### Example pages: workflows, visualization and more") +st.markdown( + """ +This app serves both as documentation and showcase what's possible with OpenMS web apps. + +In general there are two options for building workflows. + +#### 1. πŸš€ **TOPP Workflow Framework** + +Use this option if you want a standardized framework for building your workflow. -Try the example pages **πŸ“ mzML file upload**, **πŸ‘€ visualization** and **example workflows**. 
-""") \ No newline at end of file +- **pre-defined user interface** all in one streamlit page with all steps in different tabs: + - **File Upload**: upload, download and delete input files + - **Configure**: Automatically display input widgets for all paramters in TOPP tools and custom Python scripts + - **Run**: Start and stop workflow execution, includes continous log + - **Results**: Interactive result dashboard +- **write less code**: everything from file upload, input widget generation and execution of tools is handled via convenient functions +- **fast and performant workflows**: Automatic parallel execution of TOPP tools ensures great speed, comparable with workflows written in bash + +""" +) +st.page_link( + "pages/documentation.py", + label="Check out extensive documentation on the TOPP tool framework.", + icon="➑️", +) +st.page_link( + "pages/topp_workflow.py", label="Play around with the example workflow.", icon="➑️" +) +st.markdown( + """ +#### 2. 🐍 **Flexible, custom workflow with pyOpenMS on multiple pages** + +Use this option if you want full control over your workflow implementation and user interface. + +Uses the integrated parameter handling with global parameters across pages, including uploaded files. + +To get an idea check out the following pages from the example worklfow (file upload first!). +""" +) +st.page_link( + "pages/file_upload.py", + label="Upload your own mzML files or use the provided example data set.", + icon="➑️", +) +st.page_link( + "pages/raw_data_viewer.py", + label="Visualize mzML file content in an interactive dashboard.", + icon="➑️", +) +st.page_link( + "pages/run_example_workflow.py", + label="Run a small example workflow with mzML files and check out results.", + icon="➑️", +) + +st.markdown( + """ +#### Other Topics + +Includes other example pages which are independent to showcase other functionalities. +""" +) +st.page_link( + "pages/simple_workflow.py", + label="A very simple worklfow explaining the concepts of data caching in streamlit.", + icon="➑️", +) +st.page_link( + "pages/run_subprocess.py", + label="How to run any command line tool as subprocess from within the OpenMS web app.", + icon="➑️", +) \ No newline at end of file From eb1700955f22b55e852e5f64b4785acd4cf579e8 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Wed, 10 Jul 2024 12:34:39 +0200 Subject: [PATCH 12/24] fallback mzML files for TOPP workflow example --- src/Workflow.py | 70 ++++++++++++++++++++++++------------- src/workflow/StreamlitUI.py | 6 ++-- 2 files changed, 49 insertions(+), 27 deletions(-) diff --git a/src/Workflow.py b/src/Workflow.py index 8c8396766..7c1eb5a5e 100644 --- a/src/Workflow.py +++ b/src/Workflow.py @@ -7,6 +7,7 @@ import plotly.express as px from .common import show_fig + class Workflow(WorkflowManager): # Setup pages for upload, parameter, execution and results. # For layout use any streamlit components such as tabs (as shown in example), columns, or even expanders. @@ -14,14 +15,16 @@ def __init__(self) -> None: # Initialize the parent class with the workflow name. super().__init__("TOPP Workflow", st.session_state["workspace"]) - def upload(self)-> None: - t = st.tabs(["MS data", "Example with fallback data"]) + def upload(self) -> None: + t = st.tabs(["MS data"]) with t[0]: # Use the upload method from StreamlitUI to handle mzML file uploads. 
- self.ui.upload_widget(key="mzML-files", name="MS data", file_type="mzML") - with t[1]: - # Example with fallback data (not used in workflow) - self.ui.upload_widget(key="image", file_type="png", fallback="assets/OpenMS.png") + self.ui.upload_widget( + key="mzML-files", + name="MS data", + file_type="mzML", + fallback=[str(f) for f in Path("example-data", "mzML").glob("*.mzML")], + ) def configure(self) -> None: # Allow users to select mzML files for the analysis. @@ -33,7 +36,10 @@ def configure(self) -> None: ) with t[0]: # Parameters for FeatureFinderMetabo TOPP tool. - self.ui.input_TOPP("FeatureFinderMetabo", custom_defaults={"algorithm:common:noise_threshold_int": 1000.0}) + self.ui.input_TOPP( + "FeatureFinderMetabo", + custom_defaults={"algorithm:common:noise_threshold_int": 1000.0}, + ) with t[1]: # Paramters for MetaboliteAdductDecharger TOPP tool. self.ui.input_TOPP("FeatureLinkerUnlabeledKD") @@ -52,12 +58,14 @@ def execution(self) -> None: # Get mzML files with FileManager in_mzML = self.file_manager.get_files(self.params["mzML-files"]) - + # Log any messages. self.logger.log(f"Number of input mzML files: {len(in_mzML)}") # Prepare output files for feature detection. - out_ffm = self.file_manager.get_files(in_mzML, "featureXML", "feature-detection") + out_ffm = self.file_manager.get_files( + in_mzML, "featureXML", "feature-detection" + ) # Run FeatureFinderMetabo tool with input and output files. self.logger.log("Detecting features...") @@ -67,13 +75,19 @@ def execution(self) -> None: # Prepare input and output files for feature linking in_fl = self.file_manager.get_files(out_ffm, collect=True) - out_fl = self.file_manager.get_files("feature_matrix.consensusXML", set_results_dir="feature-linking") + out_fl = self.file_manager.get_files( + "feature_matrix.consensusXML", set_results_dir="feature-linking" + ) # Run FeatureLinkerUnlabaeledKD with all feature maps passed at once self.logger.log("Linking features...") - self.executor.run_topp("FeatureLinkerUnlabeledKD", input_output={"in": in_fl, "out": out_fl}) + self.executor.run_topp( + "FeatureLinkerUnlabeledKD", input_output={"in": in_fl, "out": out_fl} + ) self.logger.log("Exporting consensus features to pandas DataFrame...") - self.executor.run_python("export_consensus_feature_df", input_output={"in": out_fl[0]}) + self.executor.run_python( + "export_consensus_feature_df", input_output={"in": out_fl[0]} + ) # Check if adduct detection should be run. if self.params["run-python-script"]: # Example for a custom Python tool, which is located in src/python-tools. 
@@ -82,20 +96,26 @@ def execution(self) -> None: def results(self) -> None: @st.experimental_fragment def show_consensus_features(): - df = pd.read_csv(file, sep="\t", index_col=0) - st.metric("number of consensus features", df.shape[0]) - c1, c2 = st.columns(2) - rows = c1.dataframe(df, selection_mode="multi-row", on_select="rerun")["selection"]["rows"] - if rows: - df = df.iloc[rows, 4:] - fig = px.bar(df, barmode="group", labels={"value": "intensity"}) - with c2: - show_fig(fig, "consensus-feature-intensities") - else: - st.info("πŸ’‘ Select one ore more rows in the table to show a barplot with intensities.") + df = pd.read_csv(file, sep="\t", index_col=0) + st.metric("number of consensus features", df.shape[0]) + c1, c2 = st.columns(2) + rows = c1.dataframe(df, selection_mode="multi-row", on_select="rerun")[ + "selection" + ]["rows"] + if rows: + df = df.iloc[rows, 4:] + fig = px.bar(df, barmode="group", labels={"value": "intensity"}) + with c2: + show_fig(fig, "consensus-feature-intensities") + else: + st.info( + "πŸ’‘ Select one or more rows in the table to show a barplot with intensities." + ) - file = Path(self.workflow_dir, "results", "feature-linking", "feature_matrix.tsv") + file = Path( + self.workflow_dir, "results", "feature-linking", "feature_matrix.tsv" + ) if file.exists(): show_consensus_features() else: - st.warning("No consensus feature file found. Please run workflow first.") \ No newline at end of file + st.warning("No consensus feature file found. Please run workflow first.") diff --git a/src/workflow/StreamlitUI.py b/src/workflow/StreamlitUI.py index aab4862f8..5d97b56cb 100644 --- a/src/workflow/StreamlitUI.py +++ b/src/workflow/StreamlitUI.py @@ -112,9 +112,10 @@ def upload_widget( if isinstance(fallback, str): fallback = [fallback] for f in fallback: + c1, _ = st.columns(2) if not Path(files_dir, f).exists(): shutil.copy(f, Path(files_dir, Path(f).name)) - st.info(f"Adding default file: **{f}**") + c1.info(f"Adding default file: **{f}**") current_files = [ f.name for f in files_dir.iterdir() @@ -674,7 +675,8 @@ def zip_and_download_files(self, directory: str): def file_upload_section(self, custom_upload_function) -> None: custom_upload_function() - if st.button("⬇️ Download all uploaded files", use_container_width=True): + c1, _ = st.columns(2) + if c1.button("⬇️ Download all uploaded files", use_container_width=True): self.zip_and_download_files(Path(self.workflow_dir, "input-files")) def parameter_section(self, custom_paramter_function) -> None: From 7eff2d60ac908978d63d62a12ee39c06d1131c72 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Wed, 10 Jul 2024 12:53:48 +0200 Subject: [PATCH 13/24] use fragments in TOPP workflow --- src/Workflow.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/Workflow.py b/src/Workflow.py index 7c1eb5a5e..3f72334b3 100644 --- a/src/Workflow.py +++ b/src/Workflow.py @@ -15,6 +15,7 @@ def __init__(self) -> None: # Initialize the parent class with the workflow name. super().__init__("TOPP Workflow", st.session_state["workspace"]) + @st.experimental_fragment def upload(self) -> None: t = st.tabs(["MS data"]) with t[0]: # Use the upload method from StreamlitUI to handle mzML file uploads. @@ -26,6 +27,7 @@ def upload(self) -> None: fallback=[str(f) for f in Path("example-data", "mzML").glob("*.mzML")], ) + @st.experimental_fragment def configure(self) -> None: # Allow users to select mzML files for the analysis. self.ui.select_input_file("mzML-files", multiple=True) @@ -50,6 +52,7 @@ def configure(self) -> None: # Parameters are specified within the file in the DEFAULTS dictionary.
self.ui.input_python("example") + @st.experimental_fragment def execution(self) -> None: # Any parameter checks, here simply checking if mzML files are selected if not self.params["mzML-files"]: @@ -93,6 +96,7 @@ def execution(self) -> None: # Example for a custom Python tool, which is located in src/python-tools. self.executor.run_python("example", {"in": in_mzML}) + @st.experimental_fragment def results(self) -> None: @st.experimental_fragment def show_consensus_features(): From ee6b68e9b8aaeb25eed6fe4553a8eb9a32684c91 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Wed, 10 Jul 2024 12:54:00 +0200 Subject: [PATCH 14/24] update quickstart page --- pages/quickstart.py | 49 ++++++++++++++++++++++++++++++--------------- 1 file changed, 33 insertions(+), 16 deletions(-) diff --git a/pages/quickstart.py b/pages/quickstart.py index 5f9f6a2e4..e9eb1ec0f 100644 --- a/pages/quickstart.py +++ b/pages/quickstart.py @@ -21,16 +21,17 @@ from pathlib import Path import streamlit as st -from src.common import page_setup +from src.common import page_setup, v_space page_setup(page="main") -st.title("OpenMS Web App Template") +st.markdown("# πŸ‘‹ Quick Start") +st.markdown("## Template for OpenMS web apps using the **streamlit** framework") c1, c2 = st.columns(2) -c1.info( +c1.markdown( """ -**πŸ’‘ Template app for OpenMS workflows in a web application using the **streamlit** framework.** - +## ⭐ Features + - Simple workflows with **pyOpenMS** - Complex workflows utilizing **OpenMS TOPP tools** with parallel execution. - Workspaces for user data with unique shareable IDs @@ -40,8 +41,8 @@ - Deploy multiple apps easily with [docker-compose](https://github.com/OpenMS/streamlit-deployment) """ ) +v_space(1, c2) c2.image("assets/pyopenms_transparent_background.png", width=300) -st.markdown("## πŸ‘‹ Quick Start") if Path("OpenMS-App.zip").exists(): st.subsubheader( """ @@ -62,7 +63,7 @@ """ ) -st.markdown("### πŸ“– Documentation") +st.markdown("## πŸ“– Documentation") st.markdown( f""" This template app includes documentation for **users** including **installation** and introduction to template specific concepts such as **workspaces** and developers with detailed instructions on **how to create and deploy your own app** based on this template. @@ -73,25 +74,41 @@ label="Read documentation here, select chapter in the content menu.", icon="➑️", ) -st.markdown("### Example pages: workflows, visualization and more") + +st.markdown( + """## Workspaces and Settings +The **sidebar** contains to boxes, one for **workspaces** and one for **settings**. + +πŸ–₯️ **Workspaces** store user inputs, parameters and results for a specific session or analysis task. + +In **online mode** where the app is hosted on a remote server the workspace has a unique identifier number which can be shared with collaboration partners or stored for later access. + +In **local mode** where the app is run locally on a PC (e.g. via Windows executable) the user can create and delete separate workspaces for different projects. + +βš™οΈ **Settings** contain global settings which are relevant for all pages, such as the image export format. +""" +) + + +st.markdown("## Example pages: workflows, visualization and more") st.markdown( """ This app serves both as documentation and showcase what's possible with OpenMS web apps. In general there are two options for building workflows. -#### 1. πŸš€ **TOPP Workflow Framework** +### 1. πŸš€ **TOPP Workflow Framework** Use this option if you want a standardized framework for building your workflow. 
-- **pre-defined user interface** all in one streamlit page with all steps in different tabs: +- **Pre-defined user interface** all in one streamlit page with all steps in different tabs: - **File Upload**: upload, download and delete input files - **Configure**: Automatically display input widgets for all paramters in TOPP tools and custom Python scripts - **Run**: Start and stop workflow execution, includes continous log - **Results**: Interactive result dashboard -- **write less code**: everything from file upload, input widget generation and execution of tools is handled via convenient functions -- **fast and performant workflows**: Automatic parallel execution of TOPP tools ensures great speed, comparable with workflows written in bash - +- **Write less code**: everything from file upload, input widget generation and execution of tools is handled via convenient functions +- **Fast and performant workflows**: Automatic parallel execution of TOPP tools ensures great speed, comparable with workflows written in bash +- **Ideal for longer workflows**: Close the app and come back to the still running or finish workflow the next day, by entering your workspace again. """ ) st.page_link( @@ -104,7 +121,7 @@ ) st.markdown( """ -#### 2. 🐍 **Flexible, custom workflow with pyOpenMS on multiple pages** +### 2. 🐍 **Flexible, custom workflow with pyOpenMS on multiple pages** Use this option if you want full control over your workflow implementation and user interface. @@ -131,7 +148,7 @@ st.markdown( """ -#### Other Topics +### Other Topics Includes other example pages which are independent to showcase other functionalities. """ @@ -145,4 +162,4 @@ "pages/run_subprocess.py", label="How to run any command line tool as subprocess from within the OpenMS web app.", icon="➑️", -) \ No newline at end of file +) From 76082f839bca33473c78426c9eba7c82f927ef7f Mon Sep 17 00:00:00 2001 From: axelwalter Date: Wed, 10 Jul 2024 13:04:11 +0200 Subject: [PATCH 15/24] pylint errors --- src/plotting/MSExperimentPlotter.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/plotting/MSExperimentPlotter.py b/src/plotting/MSExperimentPlotter.py index 7cbf0fba1..c42df2c40 100644 --- a/src/plotting/MSExperimentPlotter.py +++ b/src/plotting/MSExperimentPlotter.py @@ -6,7 +6,7 @@ import numpy as np import plotly.graph_objects as go -from .BasePlotter import Colors, _BasePlotter, _BasePlotterConfig +from src.plotting.BasePlotter import Colors, _BasePlotter, _BasePlotterConfig @dataclass(kw_only=True) @@ -15,6 +15,13 @@ class MSExperimentPlotterConfig(_BasePlotterConfig): num_RT_bins: int = 50 num_mz_bins: int = 50 plot3D: bool = False + title: str = "Peak Map" + xlabel: str = "RT (s)" + ylabel: str = "m/z" + height: int = 500 + width: int = 750 + relative_intensity: bool = False + show_legend: bool = True class MSExperimentPlotter(_BasePlotter): From 85b9a1eab351f824a15d217e99e25ae5e1b80639 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Wed, 10 Jul 2024 14:16:59 +0200 Subject: [PATCH 16/24] rename pages directory due to streamlit warning - warning to use a pages directory within an app using st.navigation - could lead to unwanted behaviour --- Dockerfile | 2 +- Dockerfile_simple | 2 +- app.py | 16 ++++++++-------- {pages => content}/.gitignore | 0 {pages => content}/documentation.py | 2 +- {pages => content}/file_upload.py | 0 {pages => content}/quickstart.py | 16 ++++++++-------- {pages => content}/raw_data_viewer.py | 0 {pages => content}/run_example_workflow.py | 0 {pages => 
content}/run_subprocess.py | 0 {pages => content}/simple_workflow.py | 0 {pages => content}/topp_workflow.py | 0 src/captcha_.py | 8 ++++---- src/view.py | 4 ++-- 14 files changed, 25 insertions(+), 25 deletions(-) rename {pages => content}/.gitignore (100%) rename {pages => content}/documentation.py (99%) rename {pages => content}/file_upload.py (100%) rename {pages => content}/quickstart.py (94%) rename {pages => content}/raw_data_viewer.py (100%) rename {pages => content}/run_example_workflow.py (100%) rename {pages => content}/run_subprocess.py (100%) rename {pages => content}/simple_workflow.py (100%) rename {pages => content}/topp_workflow.py (100%) diff --git a/Dockerfile b/Dockerfile index 6a2065737..54a5e3b40 100644 --- a/Dockerfile +++ b/Dockerfile @@ -113,7 +113,7 @@ COPY app.py /app/app.py COPY src/ /app/src COPY assets/ /app/assets COPY example-data/ /app/example-data -COPY pages/ /app/pages +COPY content/ /app/pages # For streamlit configuration COPY .streamlit/config.toml /app/.streamlit/config.toml COPY clean-up-workspaces.py /app/clean-up-workspaces.py diff --git a/Dockerfile_simple b/Dockerfile_simple index 30d6a7c78..59c456ce9 100644 --- a/Dockerfile_simple +++ b/Dockerfile_simple @@ -57,7 +57,7 @@ COPY app.py /app/app.py COPY src/ /app/src COPY assets/ /app/assets COPY example-data/ /app/example-data -COPY pages/ /app/pages +COPY content/ /app/pages # For streamlit configuration COPY .streamlit/config.toml /app/.streamlit/config.toml diff --git a/app.py b/app.py index 1dd93f3e6..d3c69c332 100644 --- a/app.py +++ b/app.py @@ -3,20 +3,20 @@ pages = { "OpenMS Web App" : [ - st.Page(Path("pages", "quickstart.py"), title="Quickstart", icon="πŸ‘‹"), - st.Page(Path("pages", "documentation.py"), title="Documentation", icon="πŸ“–"), + st.Page(Path("content", "quickstart.py"), title="Quickstart", icon="πŸ‘‹"), + st.Page(Path("content", "documentation.py"), title="Documentation", icon="πŸ“–"), ], "TOPP Workflow Framework": [ - st.Page(Path("pages", "topp_workflow.py"), title="TOPP Workflow", icon="πŸš€"), + st.Page(Path("content", "topp_workflow.py"), title="TOPP Workflow", icon="πŸš€"), ], "pyOpenMS Workflow" : [ - st.Page(Path("pages", "file_upload.py"), title="File Upload", icon="πŸ“‚"), - st.Page(Path("pages", "raw_data_viewer.py"), title="View MS data", icon="πŸ‘€"), - st.Page(Path("pages", "run_example_workflow.py"), title="Run Workflow", icon="βš™οΈ"), + st.Page(Path("content", "file_upload.py"), title="File Upload", icon="πŸ“‚"), + st.Page(Path("content", "raw_data_viewer.py"), title="View MS data", icon="πŸ‘€"), + st.Page(Path("content", "run_example_workflow.py"), title="Run Workflow", icon="βš™οΈ"), ], "Others Topics": [ - st.Page(Path("pages", "simple_workflow.py"), title="Simple Workflow", icon="βš™οΈ"), - st.Page(Path("pages", "run_subprocess.py"), title="Run Subprocess", icon="πŸ–₯️"), + st.Page(Path("content", "simple_workflow.py"), title="Simple Workflow", icon="βš™οΈ"), + st.Page(Path("content", "run_subprocess.py"), title="Run Subprocess", icon="πŸ–₯️"), ] } diff --git a/pages/.gitignore b/content/.gitignore similarity index 100% rename from pages/.gitignore rename to content/.gitignore diff --git a/pages/documentation.py b/content/documentation.py similarity index 99% rename from pages/documentation.py rename to content/documentation.py index e556e571e..91f481e07 100644 --- a/pages/documentation.py +++ b/content/documentation.py @@ -261,7 +261,7 @@ > πŸ’‘ Simply set a name for the workflow and overwrite the **`upload`**, **`configure`**, **`execution`** and 
**`results`** methods in your **`Workflow`** class. -The file `pages/6_TOPP-Workflow.py` displays the workflow content and can, but does not have to be modified. +The file `content/6_TOPP-Workflow.py` displays the workflow content and can, but does not have to be modified. The `Workflow` class contains four important members, which you can use to build your own workflow: diff --git a/pages/file_upload.py b/content/file_upload.py similarity index 100% rename from pages/file_upload.py rename to content/file_upload.py diff --git a/pages/quickstart.py b/content/quickstart.py similarity index 94% rename from pages/quickstart.py rename to content/quickstart.py index e9eb1ec0f..a912b9239 100644 --- a/pages/quickstart.py +++ b/content/quickstart.py @@ -70,7 +70,7 @@ """ ) st.page_link( - "pages/documentation.py", + "content/documentation.py", label="Read documentation here, select chapter in the content menu.", icon="➑️", ) @@ -112,12 +112,12 @@ """ ) st.page_link( - "pages/documentation.py", + "content/documentation.py", label="Check out extensive documentation on the TOPP tool framework.", icon="➑️", ) st.page_link( - "pages/topp_workflow.py", label="Play around with the example workflow.", icon="➑️" + "content/topp_workflow.py", label="Play around with the example workflow.", icon="➑️" ) st.markdown( """ @@ -131,17 +131,17 @@ """ ) st.page_link( - "pages/file_upload.py", + "content/file_upload.py", label="Upload your own mzML files or use the provided example data set.", icon="➑️", ) st.page_link( - "pages/raw_data_viewer.py", + "content/raw_data_viewer.py", label="Visualize mzML file content in an interactive dashboard.", icon="➑️", ) st.page_link( - "pages/run_example_workflow.py", + "content/run_example_workflow.py", label="Run a small example workflow with mzML files and check out results.", icon="➑️", ) @@ -154,12 +154,12 @@ """ ) st.page_link( - "pages/simple_workflow.py", + "content/simple_workflow.py", label="A very simple worklfow explaining the concepts of data caching in streamlit.", icon="➑️", ) st.page_link( - "pages/run_subprocess.py", + "content/run_subprocess.py", label="How to run any command line tool as subprocess from within the OpenMS web app.", icon="➑️", ) diff --git a/pages/raw_data_viewer.py b/content/raw_data_viewer.py similarity index 100% rename from pages/raw_data_viewer.py rename to content/raw_data_viewer.py diff --git a/pages/run_example_workflow.py b/content/run_example_workflow.py similarity index 100% rename from pages/run_example_workflow.py rename to content/run_example_workflow.py diff --git a/pages/run_subprocess.py b/content/run_subprocess.py similarity index 100% rename from pages/run_subprocess.py rename to content/run_subprocess.py diff --git a/pages/simple_workflow.py b/content/simple_workflow.py similarity index 100% rename from pages/simple_workflow.py rename to content/simple_workflow.py diff --git a/pages/topp_workflow.py b/content/topp_workflow.py similarity index 100% rename from pages/topp_workflow.py rename to content/topp_workflow.py diff --git a/src/captcha_.py b/src/captcha_.py index de688f5bc..b8b7d0ccd 100644 --- a/src/captcha_.py +++ b/src/captcha_.py @@ -64,7 +64,7 @@ def delete_page(main_script_path_str: str, page_name: str) -> None: def restore_all_pages(main_script_path_str: str) -> None: """ - restore all pages found in the "pages" directory to an app's configuration. + restore all pages found in the "content" directory to an app's configuration. Args: main_script_path_str (str): The name of the main page, typically the app's name. 
@@ -79,12 +79,12 @@ def restore_all_pages(main_script_path_str: str) -> None: main_script_path = Path(main_script_path_str) # Define the directory where pages are stored - pages_dir = main_script_path.parent / "pages" + pages_dir = main_script_path.parent / "content" # To store the pages for later, to add in ascending order pages_temp = [] - # Iterate over all .py files in the "pages" directory + # Iterate over all .py files in the "content" directory for script_path in pages_dir.glob("*.py"): # append path with file name script_path_str = str(script_path.resolve()) @@ -146,7 +146,7 @@ def add_page(main_script_path_str: str, page_name: str) -> None: main_script_path = Path(main_script_path_str) # Define the directory where pages are stored - pages_dir = main_script_path.parent / "pages" + pages_dir = main_script_path.parent / "content" # Find the script path corresponding to the new page script_path = [f for f in pages_dir.glob("*.py") if f.name.find(page_name) != -1][0] diff --git a/src/view.py b/src/view.py index 1386b45fc..48bb4a2f4 100644 --- a/src/view.py +++ b/src/view.py @@ -106,8 +106,8 @@ def plot_bpc_tic() -> go.Figure: name="XIC", showlegend=True, ) - except: - st.error("Invalid m/z value.") + except ValueError: + st.error("Invalid m/z value for XIC provided. Please enter a valid number.") fig.update_layout( title=f"{st.session_state.view_selected_file}", From f2dc41f5d5b6b225e5b88e5c5142081e38883248 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Thu, 11 Jul 2024 09:28:51 +0200 Subject: [PATCH 17/24] fix pages directory name in executable workflow --- .github/workflows/build-windows-executable-app.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-windows-executable-app.yaml b/.github/workflows/build-windows-executable-app.yaml index a0de350b2..a48a9b94e 100644 --- a/.github/workflows/build-windows-executable-app.yaml +++ b/.github/workflows/build-windows-executable-app.yaml @@ -209,7 +209,7 @@ jobs: mv python-${{ env.PYTHON_VERSION }} streamlit_exe mv run_app.bat streamlit_exe cp -r src streamlit_exe - cp -r pages streamlit_exe + cp -r content streamlit_exe cp -r assets streamlit_exe cp -r example-data streamlit_exe cp -r .streamlit streamlit_exe From 3b4f8fb36e30cd7d9bdab8d991317888a43c2e5d Mon Sep 17 00:00:00 2001 From: axelwalter Date: Thu, 11 Jul 2024 15:49:10 +0200 Subject: [PATCH 18/24] fix file upload in TOPP workflow - removed fragment from file upload sections, since configure depends on it - check if fallback files are present and remove before adding new files --- src/Workflow.py | 1 - src/workflow/StreamlitUI.py | 11 +++++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/Workflow.py b/src/Workflow.py index 3f72334b3..3e5864fe1 100644 --- a/src/Workflow.py +++ b/src/Workflow.py @@ -15,7 +15,6 @@ def __init__(self) -> None: # Initialize the parent class with the workflow name. super().__init__("TOPP Workflow", st.session_state["workspace"]) - @st.experimental_fragment def upload(self) -> None: t = st.tabs(["MS data"]) with t[0]: diff --git a/src/workflow/StreamlitUI.py b/src/workflow/StreamlitUI.py index 5d97b56cb..158a04b34 100644 --- a/src/workflow/StreamlitUI.py +++ b/src/workflow/StreamlitUI.py @@ -46,8 +46,15 @@ def upload_widget( name (str, optional): Display name for the upload component. Defaults to the key if not provided. fallback (Union[List, str], optional): Default files to use if no files are uploaded. 
""" - # streamlit uploader can't handle file types with upper and lower case letters files_dir = Path(self.workflow_dir, "input-files", key) + + # create the files dir + files_dir.mkdir(exist_ok=True) + + # check if only fallback files are in files_dir, if yes, reset the directory before adding new files + if [Path(f).name for f in Path(files_dir).iterdir()] == [Path(f).name for f in fallback]: + shutil.rmtree(files_dir) + files_dir.mkdir() if not name: name = key.replace("-", " ") @@ -107,7 +114,7 @@ def upload_widget( my_bar.empty() st.success("Successfully copied files!") - if fallback: + if fallback and not any(Path(files_dir).iterdir()): files_dir.mkdir(parents=True, exist_ok=True) if isinstance(fallback, str): fallback = [fallback] From ceefccb63ac6d06854777f7098c6993f33b7ba1a Mon Sep 17 00:00:00 2001 From: axelwalter Date: Thu, 11 Jul 2024 15:51:48 +0200 Subject: [PATCH 19/24] replace 0 and O in captcha --- src/captcha_.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/captcha_.py b/src/captcha_.py index b8b7d0ccd..ada43e738 100644 --- a/src/captcha_.py +++ b/src/captcha_.py @@ -203,7 +203,7 @@ def captcha_control(): if "Captcha" not in st.session_state: st.session_state["Captcha"] = "".join( random.choices(string.ascii_uppercase + string.digits, k=length_captcha) - ) + ).replace("0", "A").replace("O", "B") col1, _ = st.columns(2) with col1.form("captcha-form"): From ab90dfe18a11736db29b3ae5b97193ef21349593 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Thu, 11 Jul 2024 16:15:04 +0200 Subject: [PATCH 20/24] TOPP file upload - files directory created once - removed unneccessary mkdirs --- src/workflow/StreamlitUI.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/workflow/StreamlitUI.py b/src/workflow/StreamlitUI.py index 158a04b34..50b3c0b28 100644 --- a/src/workflow/StreamlitUI.py +++ b/src/workflow/StreamlitUI.py @@ -49,7 +49,7 @@ def upload_widget( files_dir = Path(self.workflow_dir, "input-files", key) # create the files dir - files_dir.mkdir(exist_ok=True) + files_dir.mkdir(exist_ok=True, parents=True) # check if only fallback files are in files_dir, if yes, reset the directory before adding new files if [Path(f).name for f in Path(files_dir).iterdir()] == [Path(f).name for f in fallback]: @@ -76,7 +76,6 @@ def upload_widget( f"Add **{name}**", use_container_width=True, type="primary" ): if files: - files_dir.mkdir(parents=True, exist_ok=True) # in case of online mode a single file is returned -> put in list if not isinstance(files, list): files = [files] @@ -104,7 +103,6 @@ def upload_widget( f"No files with type **{file_type}** found in specified folder." 
) else: - files_dir.mkdir(parents=True, exist_ok=True) # Copy all mzML files to workspace mzML directory, add to selected files files = list(Path(local_dir).glob("*.mzML")) my_bar = st.progress(0) @@ -115,7 +113,6 @@ def upload_widget( st.success("Successfully copied files!") if fallback and not any(Path(files_dir).iterdir()): - files_dir.mkdir(parents=True, exist_ok=True) if isinstance(fallback, str): fallback = [fallback] for f in fallback: @@ -137,10 +134,10 @@ def upload_widget( if files_dir.exists() and not any(files_dir.iterdir()): shutil.rmtree(files_dir) - c1, c2 = st.columns(2) + c1, _ = st.columns(2) if current_files: c1.info(f"Current **{name}** files:\n\n" + "\n\n".join(current_files)) - if c2.button( + if c1.button( f"πŸ—‘οΈ Remove all **{name}** files.", use_container_width=True, key=f"remove-files-{key}", From ceb19d05574c67cec8cddbf340c59323d5391dc3 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Wed, 24 Jul 2024 07:29:34 +0200 Subject: [PATCH 21/24] sort result sets to show most recent first --- content/run_example_workflow.py | 26 ++------------------------ src/mzmlfileworkflow.py | 31 ++++++++++++++++++++++++++++++- 2 files changed, 32 insertions(+), 25 deletions(-) diff --git a/content/run_example_workflow.py b/content/run_example_workflow.py index 8b806f141..4e722300d 100755 --- a/content/run_example_workflow.py +++ b/content/run_example_workflow.py @@ -1,10 +1,8 @@ import streamlit as st -import pandas as pd -import plotly.express as px from pathlib import Path -from src.common import page_setup, save_params, show_fig, show_table +from src.common import page_setup, save_params from src import mzmlfileworkflow # Page name "workflow" will show mzML file selector in sidebar @@ -45,26 +43,6 @@ else: st.warning("Select some mzML files.") -result_dirs = [f.name for f in Path(result_dir).iterdir() if f.is_dir()] -run_dir = st.selectbox("select result from run", result_dirs) -result_dir = Path(result_dir, run_dir) -# visualize workflow results if there are any -result_file_path = Path(result_dir, "result.tsv") - -if result_file_path.exists(): - df = pd.read_csv(result_file_path, sep="\t", index_col="filenames") - - if not df.empty: - tabs = st.tabs(["πŸ“ data", "πŸ“Š plot"]) - - with tabs[0]: - show_table(df, "mzML-workflow-result") - - with tabs[1]: - fig = px.bar(df) - st.info( - "πŸ’‘ Download figure with camera icon in top right corner. File format can be specified in settings." 
- ) - show_fig(fig, "mzML-workflow-results") +mzmlfileworkflow.result_section(result_dir) \ No newline at end of file diff --git a/src/mzmlfileworkflow.py b/src/mzmlfileworkflow.py index 4dd158739..e1a372bbc 100644 --- a/src/mzmlfileworkflow.py +++ b/src/mzmlfileworkflow.py @@ -4,7 +4,8 @@ import pandas as pd import time from datetime import datetime -from src.common import reset_directory +from src.common import reset_directory, show_fig, show_table +import plotly.express as px def mzML_file_get_num_spectra(filepath): @@ -68,3 +69,31 @@ def run_workflow(params, result_dir): } ) df.to_csv(Path(result_dir, "result.tsv"), sep="\t", index=False) + +@st.experimental_fragment +def result_section(result_dir): + date_strings = [f.name for f in Path(result_dir).iterdir() if f.is_dir()] + + result_dirs = sorted(date_strings, key=lambda date: datetime.strptime(date, "%Y-%m-%d %H:%M:%S"))[::-1] + + run_dir = st.selectbox("select result from run", result_dirs) + + result_dir = Path(result_dir, run_dir) + # visualize workflow results if there are any + result_file_path = Path(result_dir, "result.tsv") + + if result_file_path.exists(): + df = pd.read_csv(result_file_path, sep="\t", index_col="filenames") + + if not df.empty: + tabs = st.tabs(["πŸ“ data", "πŸ“Š plot"]) + + with tabs[0]: + show_table(df, "mzML-workflow-result") + + with tabs[1]: + fig = px.bar(df) + st.info( + "πŸ’‘ Download figure with camera icon in top right corner. File format can be specified in settings." + ) + show_fig(fig, "mzML-workflow-results") \ No newline at end of file From 66bb5cacb29f41dcadf8092d8f2cc909b1cd7723 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Wed, 24 Jul 2024 07:34:22 +0200 Subject: [PATCH 22/24] Update content/documentation.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Tom David MΓΌller <57191390+t0mdavid-m@users.noreply.github.com> --- content/documentation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/content/documentation.py b/content/documentation.py index 91f481e07..41d595f7a 100644 --- a/content/documentation.py +++ b/content/documentation.py @@ -64,7 +64,7 @@ ## Downloading Results -You can download the results of your analyses, including figures and tables, directly from the application: +You can download the results of your analyses, including data, figures and tables, directly from the application: - **Figures**: Click the camera icon button, appearing while hovering on the top right corner of the figure. Set the desired image format in the settings panel in the side bar. - **Tables**: Use the download button to save tables in *csv* format, appearing while hovering on the top right corner of the table. From 432e87280ac91e7bcc9e94b4bff9832525f3f3db Mon Sep 17 00:00:00 2001 From: axelwalter Date: Wed, 24 Jul 2024 07:34:31 +0200 Subject: [PATCH 23/24] Update content/documentation.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Tom David MΓΌller <57191390+t0mdavid-m@users.noreply.github.com> --- content/documentation.py | 1 + 1 file changed, 1 insertion(+) diff --git a/content/documentation.py b/content/documentation.py index 41d595f7a..a11b54eed 100644 --- a/content/documentation.py +++ b/content/documentation.py @@ -67,6 +67,7 @@ You can download the results of your analyses, including data, figures and tables, directly from the application: - **Figures**: Click the camera icon button, appearing while hovering on the top right corner of the figure. 
Set the desired image format in the settings panel in the side bar. - **Tables**: Use the download button to save tables in *csv* format, appearing while hovering on the top right corner of the table. +- **Data**: Use the download section in the sidebar to download the raw results of your analysis. ## Getting Started From bbedd94b82a71ab65308b35df82daaea5e5c2e56 Mon Sep 17 00:00:00 2001 From: axelwalter Date: Wed, 24 Jul 2024 08:06:30 +0200 Subject: [PATCH 24/24] move content of documentation pages to docs directory --- content/documentation.py | 530 +----------------- docs/.gitignore | 1 + docs/build_app.md | 68 +++ docs/deployment.md | 82 +++ docs/installation.md | 39 ++ docs/toppframework.py | 269 +++++++++ docs/user_guide.md | 44 ++ .../win_exe_with_embed_py.md | 2 +- .../win_exe_with_pyinstaller.md | 36 +- 9 files changed, 545 insertions(+), 526 deletions(-) create mode 100644 docs/.gitignore create mode 100644 docs/build_app.md create mode 100644 docs/deployment.md create mode 100644 docs/installation.md create mode 100644 docs/toppframework.py create mode 100644 docs/user_guide.md rename win_exe_with_embed_py.md => docs/win_exe_with_embed_py.md (95%) rename win_exe_with_pyinstaller.md => docs/win_exe_with_pyinstaller.md (79%) diff --git a/content/documentation.py b/content/documentation.py index a11b54eed..cc31a26c9 100644 --- a/content/documentation.py +++ b/content/documentation.py @@ -1,12 +1,7 @@ import streamlit as st -from src.Workflow import Workflow -from src.workflow.StreamlitUI import StreamlitUI -from src.workflow.FileManager import FileManager -from src.workflow.CommandExecutor import CommandExecutor from src.common import page_setup -from inspect import getsource from pathlib import Path -import requests +from docs.toppframework import content as topp_framework_content page_setup() @@ -33,54 +28,9 @@ ############################################################################################# if page == pages[0]: - st.markdown( - """ -# User Guide - -Welcome to the OpenMS Streamlit Web Application! This guide will help you understand how to use our tools effectively. - -## Advantages of OpenMS Web Apps - -OpenMS web applications provide a user-friendly interface for accessing the powerful features of OpenMS. Here are a few advantages: -- **Accessibility**: Access powerful OpenMS algorithms and TOPP tools from any device with a web browser. -- **Ease of Use**: Simplified user interface makes it easy for both beginners and experts to perform complex analyses. -- **No Installation Required**: Use the tools without the need to install OpenMS locally, saving time and system resources. - -## Workspaces - -In the OpenMS web application, workspaces are designed to keep your analysis organized: -- **Workspace Specific Parameters and Files**: Each workspace stores parameters and files (uploaded input files and results from workflows). -- **Persistence**: Your workspaces and parameters are saved, so you can return to your analysis anytime and pick up where you left off. - -## Online and Local Mode Differences - -There are a few key differences between operating in online and local modes: -- **File Uploads**: - - *Online Mode*: You can upload only one file at a time. This helps manage server load and optimizes performance. - - *Local Mode*: Multiple file uploads are supported, giving you flexibility when working with large datasets. -- **Workspace Access**: - - In online mode, workspaces are stored temporarily and will be cleared after seven days of inactivity. 
- - In local mode, workspaces are saved on your local machine, allowing for persistent storage. - -## Downloading Results - -You can download the results of your analyses, including data, figures and tables, directly from the application: -- **Figures**: Click the camera icon button, appearing while hovering on the top right corner of the figure. Set the desired image format in the settings panel in the side bar. -- **Tables**: Use the download button to save tables in *csv* format, appearing while hovering on the top right corner of the table. -- **Data**: Use the download section in the sidebar to download the raw results of your analysis. - -## Getting Started - -To get started: -1. Select or create a new workspace. -2. Upload your data file. -3. Set the necessary parameters for your analysis. -4. Run the analysis. -5. View and download your results. - -For more detailed information on each step, refer to the specific sections of this guide. -""" - ) + with open(Path("docs", "user_guide.md"), "r", encoding="utf-8") as f: + content = f.read() + st.markdown(content) ############################################################################################# # Installation @@ -101,495 +51,59 @@ mime="archive/zip", type="primary", ) - - st.markdown( - """ -# Installation - -## Windows - -The app is available as pre-packaged Windows executable, including all dependencies. - -The windows executable is built by a GitHub action and can be downloaded [here](https://github.com/OpenMS/streamlit-template/actions/workflows/build-windows-executable-app.yaml). -Select the latest successfull run and download the zip file from the artifacts section, while signed in to GitHub. - -## Python - -Clone the [streamlit-template repository](https://github.com/OpenMS/streamlit-template). It includes files to install dependencies via pip or conda. - -### via pip in an existing Python environment - -To install all required depdencies via pip in an already existing Python environment, run the following command in the terminal: - -`pip install -r requirements.txt` - -### create new environment via conda/mamba - -Create and activate the conda environment: - -`conda env create -f environment.yml` - -`conda activate streamlit-env` - -### run the app - -Run the app via streamlit command in the terminal with or without *local* mode (default is *online* mode). Learn more about *local* and *online* mode in the documentation page πŸ“– **OpenMS Template App**. - -`streamlit run app.py [local]` - -## Docker - -This repository contains two Dockerfiles. - -1. `Dockerfile`: This Dockerfile builds all dependencies for the app including Python packages and the OpenMS TOPP tools. Recommended for more complex workflows where you want to use the OpenMS TOPP tools for instance with the **TOPP Workflow Framework**. -2. `Dockerfile_simple`: This Dockerfile builds only the Python packages. Recommended for simple apps using pyOpenMS only. - -""" - ) + with open(Path("docs", "installation.md"), "r", encoding="utf-8") as f: + content = f.read() + st.markdown(content) ############################################################################################# # Developer Overview, how to build app based on Template ############################################################################################# if page == pages[2]: - st.markdown( - """ -# Build your own app based on this template - -## App layout - -- *Main page* contains explanatory text on how to use the app and a workspace selector. 
`app.py` -- *Pages* can be navigated via *Sidebar*. Sidebar also contains the OpenMS logo, settings panel and a workspace indicator. The *main page* contains a workspace selector as well. -- See *pages* in the template app for example use cases. The content of this app serves as a documentation. - -## Key concepts - -- **Workspaces** -: Directories where all data is generated and uploaded can be stored as well as a workspace specific parameter file. -- **Run the app locally and online** -: Launching the app with the `local` argument lets the user create/remove workspaces. In the online the user gets a workspace with a specific ID. -- **Parameters** -: Parameters (defaults in `assets/default-params.json`) store changing parameters for each workspace. Parameters are loaded via the page_setup function at the start of each page. To track a widget variable via parameters simply give them a key and add a matching entry in the default parameters file. Initialize a widget value from the params dictionary. - -```python -params = page_setup() - -st.number_input(label="x dimension", min_value=1, max_value=20, -value=params["example-y-dimension"], step=1, key="example-y-dimension") - -save_params() -``` - -## Code structure - -- **Pages** must be placed in the `pages` directory. -- It is recommended to use a separate file for defining functions per page in the `src` directory. -- The `src/common.py` file contains a set of useful functions for common use (e.g. rendering a table with download button). - -## Modify the template to build your own app - -1. In `src/common.py`, update the name of your app and the repository name - ```python - APP_NAME = "OpenMS Streamlit App" - REPOSITORY_NAME = "streamlit-template" - ``` -2. In `clean-up-workspaces.py`, update the name of the workspaces directory to `/workspaces-` - ```python - workspaces_directory = Path("/workspaces-streamlit-template") - ``` -3. Update `README.md` accordingly - - -**Dockerfile-related** -1. Choose one of the Dockerfiles depending on your use case: - - `Dockerfile` builds OpenMS including TOPP tools - - `Dockerfile_simple` uses pyOpenMS only -2. Update the Dockerfile: - - with the `GITHUB_USER` owning the Streamlit app repository - - with the `GITHUB_REPO` name of the Streamlit app repository - - if your main page Python file is not called `app.py`, modify the following line - ```dockerfile - RUN echo "mamba run --no-capture-output -n streamlit-env streamlit run app.py" >> /app/entrypoint.sh - ``` -3. Update Python package dependency files: - - `requirements.txt` if using `Dockerfile_simple` - - `environment.yml` if using `Dockerfile` - -## How to build a workflow - -### Simple workflow using pyOpenMS - -Take a look at the example pages `Simple Workflow` or `Workflow with mzML files` for examples (on the *sidebar*). Put Streamlit logic inside the pages and call the functions with workflow logic from from the `src` directory (for our examples `src/simple_workflow.py` and `src/mzmlfileworkflow.py`). - -### Complex workflow using TOPP tools - -This template app features a module in `src/workflow` that allows for complex and long workflows to be built very efficiently. Check out the `TOPP Workflow Framework` page for more information (on the *sidebar*). 
-""" - ) + with open(Path("docs", "build_app.md"), "r", encoding="utf-8") as f: + content = f.read() + st.markdown(content) ############################################################################################# # TOPP Workflow Framework ############################################################################################# if page == pages[3]: - wf = Workflow() - - st.title("TOPP Workflow Framework Documentation") - - st.markdown( - """ -## Features - -- streamlined methods for uploading files, setting parameters, and executing workflows -- automatic parameter handling -- quickly build parameter interface for TOPP tools with all parameters from *ini* files -- automatically create a log file for each workflow run with stdout and stderr -- workflow output updates automatically in short intervalls -- user can leave the app and return to the running workflow at any time -- quickly build a workflow with multiple steps channelling files between steps -""" - ) - - st.markdown( - """ -## Quickstart - -This repository contains a module in `src/workflow` that provides a framework for building and running analysis workflows. - -The `WorkflowManager` class provides the core workflow logic. It uses the `Logger`, `FileManager`, `ParameterManager`, and `CommandExecutor` classes to setup a complete workflow logic. - -To build your own workflow edit the file `src/TOPPWorkflow.py`. Use any streamlit components such as tabs (as shown in example), columns, or even expanders to organize the helper functions for displaying file upload and parameter widgets. - -> πŸ’‘ Simply set a name for the workflow and overwrite the **`upload`**, **`configure`**, **`execution`** and **`results`** methods in your **`Workflow`** class. - -The file `content/6_TOPP-Workflow.py` displays the workflow content and can, but does not have to be modified. - -The `Workflow` class contains four important members, which you can use to build your own workflow: - -> **`self.params`:** dictionary of parameters stored in a JSON file in the workflow directory. Parameter handling is done automatically. Default values are defined in input widgets and non-default values are stored in the JSON file. - -> **`self.ui`:** object of type `StreamlitUI` contains helper functions for building the parameter and file upload widgets. - -> **`self.executor`:** object of type `CommandExecutor` can be used to run any command line tool alone or in parallel and includes a convenient method for running TOPP tools. - -> **`self.logger`:** object of type `Logger` to write any output to a log file during workflow execution. - -> **`self.file_manager`:** object of type `FileManager` to handle file types and creation of output directories. -""" - ) - - with st.expander("**Complete example for custom Workflow class**", expanded=False): - st.code(getsource(Workflow)) - - st.markdown( - """ -## File Upload - -All input files for the workflow will be stored within the workflow directory in the subdirectory `input-files` within it's own subdirectory for the file type. - -The subdirectory name will be determined by a **key** that is defined in the `self.ui.upload_widget` method. The uploaded files are available by the specific key for parameter input widgets and accessible while building the workflow. 
- -Calling this method will create a complete file upload widget section with the following components: - -- file uploader -- list of currently uploaded files with this key (or a warning if there are none) -- button to delete all files - -Fallback files(s) can be specified, which will be used if the user doesn't upload any files. This can be useful for example for database files where a default is provided. -""" - ) - - st.code(getsource(Workflow.upload)) - - st.info( - "πŸ’‘ Use the same **key** for parameter widgets, to select which of the uploaded files to use for analysis." - ) - - with st.expander("**Code documentation:**", expanded=True): - st.help(StreamlitUI.upload_widget) - - st.markdown( - """ -## Parameter Input - -The paramter section is already pre-defined as a form with buttons to **save parameters** and **load defaults** and a toggle to show TOPP tool parameters marked as advanced. - -Generating parameter input widgets is done with the `self.ui.input` method for any parameter and the `self.ui.input_TOPP` method for TOPP tools. - -**1. Choose `self.ui.input_widget` for any paramter not-related to a TOPP tool or `self.ui.select_input_file` for any input file:** - -It takes the obligatory **key** parameter. The key is used to access the parameter value in the workflow parameters dictionary `self.params`. Default values do not need to be specified in a separate file. Instead they are determined from the widgets default value automatically. Widget types can be specified or automatically determined from **default** and **options** parameters. It's suggested to add a **help** text and other parameters for numerical input. - -Make sure to match the **key** of the upload widget when calling `self.ui.input_TOPP`. - -**2. Choose `self.ui.input_TOPP` to automatically generate complete input sections for a TOPP tool:** - -It takes the obligatory **topp_tool_name** parameter and generates input widgets for each parameter present in the **ini** file (automatically created) except for input and output file parameters. For all input file parameters a widget needs to be created with `self.ui.select_input_file` with an appropriate **key**. For TOPP tool parameters only non-default values are stored. - -**3. Choose `self.ui.input_python` to automatically generate complete input sections for a custom Python tool:** - -Takes the obligatory **script_file** argument. The default location for the Python script files is in `src/python-tools` (in this case the `.py` file extension is optional in the **script_file** argument), however, any other path can be specified as well. Parameters need to be specified in the Python script in the **DEFAULTS** variable with the mandatory **key** and **value** parameters. -""" - ) - - with st.expander( - "Options to use as dictionary keys for parameter definitions (see `src/python-tools/example.py` for an example)" - ): - st.markdown( - """ -**Mandatory** keys for each parameter -- *key:* a unique identifier -- *value:* the default value - -**Optional** keys for each parameter -- *name:* the name of the parameter -- *hide:* don't show the parameter in the parameter section (e.g. 
for **input/output files**) -- *options:* a list of valid options for the parameter -- *min:* the minimum value for the parameter (int and float) -- *max:* the maximum value for the parameter (int and float) -- *step_size:* the step size for the parameter (int and float) -- *help:* a description of the parameter -- *widget_type:* the type of widget to use for the parameter (default: auto) -- *advanced:* whether or not the parameter is advanced (default: False) -""" - ) - - st.code(getsource(Workflow.configure)) - st.info( - "πŸ’‘ Access parameter widget values by their **key** in the `self.params` object, e.g. `self.params['mzML-files']` will give all selected mzML files." - ) - - with st.expander("**Code documentation**", expanded=True): - st.help(StreamlitUI.input_widget) - st.help(StreamlitUI.select_input_file) - st.help(StreamlitUI.input_TOPP) - st.help(StreamlitUI.input_python) - st.markdown( - """ -## Building the Workflow - -Building the workflow involves **calling all (TOPP) tools** using **`self.executor`** with **input and output files** based on the **`FileManager`** class. For TOPP tools non-input-output parameters are handled automatically. Parameters for other processes and workflow logic can be accessed via widget keys (set in the parameter section) in the **`self.params`** dictionary. - -### FileManager - -The `FileManager` class serves as an interface for unified input and output files with useful functionality specific to building workflows, such as **setting a (new) file type** and **subdirectory in the workflows result directory**. - -Use the **`get_files`** method to get a list of all file paths as strings. - -Optionally set the following parameters modify the files: - -- **set_file_type** (str): set new file types and result subdirectory. -- **set_results_dir** (str): set a new subdirectory in the workflows result directory. -- **collect** (bool): collect all files into a single list. Will return a list with a single entry, which is a list of all files. Useful to pass to tools which can handle multiple input files at once. -""" - ) - - st.code( - """ -# Get all file paths as strings from self.param entry. -mzML_files = self.file_manager.get_files(self.params["mzML-files]) -# mzML_files = ['../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Control.mzML', '../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Treatment.mzML'] - -# Creating output files for a TOPP tool, setting a new file type and result subdirectory name. -feature_detection_out = self.file_manager.get_files(mzML_files, set_file_type="featureXML", set_results_dir="feature-detection") -# feature_detection_out = ['../workspaces-streamlit-template/default/topp-workflow/results/feature-detection/Control.featureXML', '../workspaces-streamlit-template/default/topp-workflow/results/feature-detection/Treatment.featureXML'] - -# Setting a name for the output directory automatically (useful if you never plan to access these files in the results section). -feature_detection_out = self.file_manager.get_files(mzML_files, set_file_type="featureXML", set_results_dir="auto") -# feature_detection_out = ['../workspaces-streamlit-template/default/topp-workflow/results/6DUd/Control.featureXML', '../workspaces-streamlit-template/default/topp-workflow/results/6DUd/Treatment.featureXML'] - -# Combining all mzML files to be passed to a TOPP tool in a single run. Using "collected" files as argument for self.file_manager.get_files will "un-collect" them. 
-mzML_files = self.file_manager.get_files(mzML_files, collect=True) -# mzML_files = [['../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Control.mzML', '../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Treatment.mzML']] - """ - ) - - with st.expander("**Code documentation**", expanded=True): - st.help(FileManager.get_files) - - st.markdown( - """ -### Running commands - -It is possible to execute any command line command using the **`self.executor`** object, either a single command or a list of commands in parallel. Furthermore a method to run TOPP tools is included. - -**1. Single command** - -The `self.executor.run_command` method takes a single command as input and optionally logs stdout and stderr to the workflow log (default True). -""" - ) - - st.code( - """ -self.executor.run_command(["command", "arg1", "arg2", ...]) -""" - ) - - st.markdown( - """ -**2. Run multiple commands in parallel** - -The `self.executor.run_multiple_commands` method takes a list of commands as inputs. - -**3. Run TOPP tools** - -The `self.executor.run_topp` method takes a TOPP tool name as input and a dictionary of input and output files as input. The **keys** need to match the actual input and output parameter names of the TOPP tool. The **values** should be of type `FileManager`. All other **non-default parameters (from input widgets)** will be passed to the TOPP tool automatically. - -Depending on the number of input files, the TOPP tool will be run either in parallel or in a single run (using **`FileManager.collect`**). -""" - ) - - st.info( - """πŸ’‘ **Input and output file order** - -In many tools, a single input file is processed to produce a single output file. -When dealing with lists of input or output files, the convention is that -files are paired based on their order. For instance, the n-th input file is -assumed to correspond to the n-th output file, maintaining a structured -relationship between input and output data. -""" - ) - st.code( - """ -# e.g. FeatureFinderMetabo takes single input files -in_files = self.file_manager.get_files(["sample1.mzML", "sample2.mzML"]) -out_files = self.file_manager.get_files(in_files, set_file_type="featureXML", set_results_dir="feature-detection") - -# Run FeatureFinderMetabo tool with input and output files in parallel for each pair of input/output files. -self.executor.run_topp("FeatureFinderMetabo", input_output={"in": in_files, "out": out_files}) -# FeaturFinderMetabo -in sample1.mzML -out workspace-dir/results/feature-detection/sample1.featureXML -# FeaturFinderMetabo -in sample2.mzML -out workspace-dir/results/feature-detection/sample2.featureXML - -# Run SiriusExport tool with mutliple input and output files. -out = self.file_manager.get_files("sirius.ms", set_results_dir="sirius-export") -self.executor.run_topp("SiriusExport", {"in": self.file_manager.get_files(in_files, collect=True), - "in_featureinfo": self.file_manager.get_files(out_files, collect=True), - "out": out_se}) -# SiriusExport -in sample1.mzML sample2.mzML -in_featureinfo sample1.featureXML sample2.featureXML -out sirius.ms - """ - ) - - st.markdown( - """ -**4. Run custom Python scripts** - -Sometimes it is useful to run custom Python scripts, for example for extra functionality which is not included in a TOPP tool. - -`self.executor.run_python` works similar to `self.executor.run_topp`, but takes a single Python script as input instead of a TOPP tool name. 
The default location for the Python script files is in `src/python-tools` (in this case the `.py` file extension is optional in the **script_file** argument), however, any other path can be specified as well. Input and output file parameters need to be specified in the **input_output** dictionary. -""" - ) - - st.code( - """ -# e.g. example Python tool which modifies mzML files in place based on experimental design -self.ui.input_python(script_file="example", input_output={"in": in_mzML, "in_experimantal_design": FileManager(["path/to/experimantal-design.tsv"])}) - """ - ) - - st.markdown("**Example for a complete workflow section:**") - - st.code(getsource(Workflow.execution)) - - with st.expander("**Code documentation**", expanded=True): - st.help(CommandExecutor.run_command) - st.help(CommandExecutor.run_multiple_commands) - st.help(CommandExecutor.run_topp) - st.help(CommandExecutor.run_python) + topp_framework_content() ############################################################################################# # Windows Executables ############################################################################################# if page == pages[4]: - # Define CSS styles - css = """ - -""" - - st.markdown(css, unsafe_allow_html=True) - st.markdown( """ -# πŸ’» How to package everything for Windows executables +## πŸ’» How to package everything for Windows executables This guide explains how to package OpenMS apps into Windows executables using two different methods: """ ) - - def fetch_markdown_content(url): - response = requests.get(url) - if response.status_code == 200: - # Remove the first line from the content - content_lines = response.text.split("\n") - markdown_content = "\n".join(content_lines[1:]) - return markdown_content - else: - return None - - - tabs = ["embeddable Python", "PyInstaller"] + tabs = ["**embeddable Python**", "**PyInstaller**"] tabs = st.tabs(tabs) # window executable with embeddable python with tabs[0]: - markdown_url = "https://raw.githubusercontent.com/OpenMS/streamlit-template/main/win_exe_with_embed_py.md" - - markdown_content = fetch_markdown_content(markdown_url) - - if markdown_content: - st.markdown(markdown_content, unsafe_allow_html=True) - else: - st.error( - "Failed to fetch Markdown content from the specified URL.", markdown_url - ) + with open(Path("docs", "win_exe_with_embed_py.md"), "r", encoding="utf-8") as f: + content = f.read() + st.markdown(content) # window executable with pyinstaller with tabs[1]: - # URL of the Markdown document - markdown_url = "https://raw.githubusercontent.com/OpenMS/streamlit-template/main/win_exe_with_pyinstaller.md" - - markdown_content = fetch_markdown_content(markdown_url) - - if markdown_content: - st.markdown(markdown_content, unsafe_allow_html=True) - else: - st.error( - "Failed to fetch Markdown content from the specified URL. 
", markdown_url - ) + with open(Path("docs", "win_exe_with_pyinstaller.md"), "r", encoding="utf-8") as f: + content = f.read() + st.markdown(content) ############################################################################################# # Deployment ############################################################################################# if page == pages[5]: - url = "https://raw.githubusercontent.com/OpenMS/streamlit-deployment/main/README.md" - - response = requests.get(url) - - if response.status_code == 200: - st.markdown(response.text) # or process the content as needed - else: - st.warning("Failed to get README from streamlit-deployment repository.") \ No newline at end of file + with open(Path("docs", "deployment.md"), "r", encoding="utf-8") as f: + content = f.read() + st.markdown(content) \ No newline at end of file diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 000000000..763624ebe --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1 @@ +__pycache__/* \ No newline at end of file diff --git a/docs/build_app.md b/docs/build_app.md new file mode 100644 index 000000000..4eaec43e0 --- /dev/null +++ b/docs/build_app.md @@ -0,0 +1,68 @@ +# Build your own app based on this template + +## App layout + +*Pages* can be navigated via the *sidebar*, which also contains the OpenMS logo, settings panel and a workspace indicator. + +## Key concepts + +- **Workspaces** +: Directories where all data is generated and uploaded can be stored as well as a workspace specific parameter file. +- **Run the app locally and online** +: Launching the app with the `local` argument lets the user create/remove workspaces. In the online the user gets a workspace with a specific ID. +- **Parameters** +: Parameters (defaults in `assets/default-params.json`) store changing parameters for each workspace. Parameters are loaded via the page_setup function at the start of each page. To track a widget variable via parameters simply give them a key and add a matching entry in the default parameters file. Initialize a widget value from the params dictionary. + +```python +params = page_setup() + +st.number_input(label="x dimension", min_value=1, max_value=20, +value=params["example-y-dimension"], step=1, key="example-y-dimension") + +save_params() +``` + +## Code structure +- The main file `app.py` defines page layout. +- **Pages** must be placed in the `content` directory. +- It is recommended to use a separate file for defining functions per page in the `src` directory. +- The `src/common.py` file contains a set of useful functions for common use (e.g. rendering a table with download button). + +## Modify the template to build your own app + +1. In `src/common.py`, update the name of your app and the repository name + ```python + APP_NAME = "OpenMS Streamlit App" + REPOSITORY_NAME = "streamlit-template" + ``` +2. In `clean-up-workspaces.py`, update the name of the workspaces directory to `/workspaces-` + ```python + workspaces_directory = Path("/workspaces-streamlit-template") + ``` +3. Update `README.md` accordingly + + +**Dockerfile-related** +1. Choose one of the Dockerfiles depending on your use case: + - `Dockerfile` builds OpenMS including TOPP tools + - `Dockerfile_simple` uses pyOpenMS only +2. 
Update the Dockerfile: + - with the `GITHUB_USER` owning the Streamlit app repository + - with the `GITHUB_REPO` name of the Streamlit app repository + - if your main page Python file is not called `app.py`, modify the following line + ```dockerfile + RUN echo "mamba run --no-capture-output -n streamlit-env streamlit run app.py" >> /app/entrypoint.sh + ``` +3. Update Python package dependency files: + - `requirements.txt` if using `Dockerfile_simple` + - `environment.yml` if using `Dockerfile` + +## How to build a workflow + +### Simple workflow using pyOpenMS + +Take a look at the example pages `Simple Workflow` or `Workflow with mzML files` for examples (on the *sidebar*). Put Streamlit logic inside the pages and call the functions with workflow logic from from the `src` directory (for our examples `src/simple_workflow.py` and `src/mzmlfileworkflow.py`). + +### Complex workflow using TOPP tools + +This template app features a module in `src/workflow` that allows for complex and long workflows to be built very efficiently. Check out the `TOPP Workflow Framework` page for more information (on the *sidebar*). \ No newline at end of file diff --git a/docs/deployment.md b/docs/deployment.md new file mode 100644 index 000000000..f768ae347 --- /dev/null +++ b/docs/deployment.md @@ -0,0 +1,82 @@ +# OpenMS streamlit app deployment + +Multiple streamlit apps based on the [OpenMS streamlit template](https://github.com/OpenMS/streamlit-template/) can be deployed together using docker compose. + +## Features + +- deploy all OpenMS apps at once +- user data (in workspaces) is stored in persistent docker volumes for each app + +## Requirements +- Docker Compose + +## Deployment (e.g., needed after one app changed) + +**1. Make sure submodules are up-to-data.** + +`git submodule init` + +`git submodule update` + +**2. Specify GitHub token (to download Windows executables).** + +> This is **important**! Ommitting this step while result in all apps not having the option to download exetutables any more. + +Create a temporary `.env` file with your Github token. It should contain only one line: + +`GITHUB_TOKEN=` + +**3. Run docker-compose.** + +`docker-compose up --build -d` + +> Make sure to remove the `.env` file with your Github token after successfull build + +## Add new app + +This will add your app as a submodule to the streamlit deployment repository. + +**1. Fork and clone the [OpenMS streamlit deployment](https://github.com/OpenMS/streamlit-deployment) repository locally.** + +**2. Add your app as submodule. Make sure the app name is not used already.** + +`git submodule add ` + +**3. Initialize and update submodules.** + +`git submodule init` + +`git submodule update` + +**4. Add your app to `docker-compose.yml` file as a new service.** + +Copy the last service as a template. + +Check and update the following entries: + +- name of the service + - the name of the submodule +- build context + - the relative path to the submodule +- build dockerfile + - the correct Dockerfile +- image + - name of the docker image (typically the service name with underscores) +- ports + - chose an incremental host port number from the last service pointing to the streamlit port in docker container (8501) +- volumes + - update the names of the workspace directories, user data is stored outside of the docker container in a docker volume +- command + - update command with your main streamlit file + +**6. Test everything works locally.** + +Run docker-compose to launch all services. 
+ +`docker-compose up --build -d` + +- there should be no errors building all services +- make sure all apps are accessible via their port from localhost +- test functionality of your app + +**7. Make a pull request with your changes to OpenMS/streamlit-deployment main branch.** \ No newline at end of file diff --git a/docs/installation.md b/docs/installation.md new file mode 100644 index 000000000..41a5cca8f --- /dev/null +++ b/docs/installation.md @@ -0,0 +1,39 @@ +# Installation + +## Windows + +The app is available as pre-packaged Windows executable, including all dependencies. + +The windows executable is built by a GitHub action and can be downloaded [here](https://github.com/OpenMS/streamlit-template/actions/workflows/build-windows-executable-app.yaml). +Select the latest successfull run and download the zip file from the artifacts section, while signed in to GitHub. + +## Python + +Clone the [streamlit-template repository](https://github.com/OpenMS/streamlit-template). It includes files to install dependencies via pip or conda. + +### via pip in an existing Python environment + +To install all required depdencies via pip in an already existing Python environment, run the following command in the terminal: + +`pip install -r requirements.txt` + +### create new environment via conda/mamba + +Create and activate the conda environment: + +`conda env create -f environment.yml` + +`conda activate streamlit-env` + +### run the app + +Run the app via streamlit command in the terminal with or without *local* mode (default is *online* mode). Learn more about *local* and *online* mode in the documentation page πŸ“– **OpenMS Template App**. + +`streamlit run app.py [local]` + +## Docker + +This repository contains two Dockerfiles. + +1. `Dockerfile`: This Dockerfile builds all dependencies for the app including Python packages and the OpenMS TOPP tools. Recommended for more complex workflows where you want to use the OpenMS TOPP tools for instance with the **TOPP Workflow Framework**. +2. `Dockerfile_simple`: This Dockerfile builds only the Python packages. Recommended for simple apps using pyOpenMS only. diff --git a/docs/toppframework.py b/docs/toppframework.py new file mode 100644 index 000000000..1afa819b8 --- /dev/null +++ b/docs/toppframework.py @@ -0,0 +1,269 @@ +import streamlit as st +from src.Workflow import Workflow +from src.workflow.StreamlitUI import StreamlitUI +from src.workflow.FileManager import FileManager +from src.workflow.CommandExecutor import CommandExecutor +from inspect import getsource + +def content(): + st.title("TOPP Workflow Framework Documentation") + + st.markdown( + """ +## Features + +- streamlined methods for uploading files, setting parameters, and executing workflows +- automatic parameter handling +- quickly build parameter interface for TOPP tools with all parameters from *ini* files +- automatically create a log file for each workflow run with stdout and stderr +- workflow output updates automatically in short intervalls +- user can leave the app and return to the running workflow at any time +- quickly build a workflow with multiple steps channelling files between steps +""" + ) + + st.markdown( + """ +## Quickstart + +This repository contains a module in `src/workflow` that provides a framework for building and running analysis workflows. + +The `WorkflowManager` class provides the core workflow logic. It uses the `Logger`, `FileManager`, `ParameterManager`, and `CommandExecutor` classes to setup a complete workflow logic. 
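+
+A minimal sketch of what such a `Workflow` class can look like is shown below (illustrative only: the base class import path and the exact widget arguments are assumptions and the method bodies are placeholders; see the complete example in the expander below):
+"""
+    )
+
+    st.code(
+        """
+# Hypothetical minimal Workflow skeleton (for orientation only).
+from src.workflow.WorkflowManager import WorkflowManager  # assumed import path
+
+class Workflow(WorkflowManager):
+    def upload(self) -> None:
+        # file upload widgets, e.g. for mzML files (the key is reused in configure/execution)
+        self.ui.upload_widget(key="mzML-files", name="MS data")  # argument names assumed
+
+    def configure(self) -> None:
+        # input file selection and a complete parameter section for a TOPP tool
+        self.ui.select_input_file(key="mzML-files")  # argument names assumed
+        self.ui.input_TOPP("FeatureFinderMetabo")
+
+    def execution(self) -> None:
+        # run a TOPP tool on the selected input files
+        in_files = self.file_manager.get_files(self.params["mzML-files"])
+        out_files = self.file_manager.get_files(in_files, set_file_type="featureXML", set_results_dir="feature-detection")
+        self.executor.run_topp("FeatureFinderMetabo", input_output={"in": in_files, "out": out_files})
+
+    def results(self) -> None:
+        # display result files, e.g. with standard streamlit widgets
+        pass
+"""
+    )
+
+    st.markdown(
+        """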
+ +To build your own workflow edit the file `src/TOPPWorkflow.py`. Use any streamlit components such as tabs (as shown in example), columns, or even expanders to organize the helper functions for displaying file upload and parameter widgets. + +> πŸ’‘ Simply set a name for the workflow and overwrite the **`upload`**, **`configure`**, **`execution`** and **`results`** methods in your **`Workflow`** class. + +The file `content/6_TOPP-Workflow.py` displays the workflow content and can, but does not have to be modified. + +The `Workflow` class contains four important members, which you can use to build your own workflow: + +> **`self.params`:** dictionary of parameters stored in a JSON file in the workflow directory. Parameter handling is done automatically. Default values are defined in input widgets and non-default values are stored in the JSON file. + +> **`self.ui`:** object of type `StreamlitUI` contains helper functions for building the parameter and file upload widgets. + +> **`self.executor`:** object of type `CommandExecutor` can be used to run any command line tool alone or in parallel and includes a convenient method for running TOPP tools. + +> **`self.logger`:** object of type `Logger` to write any output to a log file during workflow execution. + +> **`self.file_manager`:** object of type `FileManager` to handle file types and creation of output directories. +""" + ) + + with st.expander("**Complete example for custom Workflow class**", expanded=False): + st.code(getsource(Workflow)) + + st.markdown( + """ +## File Upload + +All input files for the workflow will be stored within the workflow directory in the subdirectory `input-files` within it's own subdirectory for the file type. + +The subdirectory name will be determined by a **key** that is defined in the `self.ui.upload_widget` method. The uploaded files are available by the specific key for parameter input widgets and accessible while building the workflow. + +Calling this method will create a complete file upload widget section with the following components: + +- file uploader +- list of currently uploaded files with this key (or a warning if there are none) +- button to delete all files + +Fallback files(s) can be specified, which will be used if the user doesn't upload any files. This can be useful for example for database files where a default is provided. +""" + ) + + st.code(getsource(Workflow.upload)) + + st.info( + "πŸ’‘ Use the same **key** for parameter widgets, to select which of the uploaded files to use for analysis." + ) + + with st.expander("**Code documentation:**", expanded=True): + st.help(StreamlitUI.upload_widget) + + st.markdown( + """ +## Parameter Input + +The paramter section is already pre-defined as a form with buttons to **save parameters** and **load defaults** and a toggle to show TOPP tool parameters marked as advanced. + +Generating parameter input widgets is done with the `self.ui.input` method for any parameter and the `self.ui.input_TOPP` method for TOPP tools. + +**1. Choose `self.ui.input_widget` for any paramter not-related to a TOPP tool or `self.ui.select_input_file` for any input file:** + +It takes the obligatory **key** parameter. The key is used to access the parameter value in the workflow parameters dictionary `self.params`. Default values do not need to be specified in a separate file. Instead they are determined from the widgets default value automatically. Widget types can be specified or automatically determined from **default** and **options** parameters. 
It's suggested to add a **help** text and other parameters for numerical input. + +Make sure to match the **key** of the upload widget when calling `self.ui.input_TOPP`. + +**2. Choose `self.ui.input_TOPP` to automatically generate complete input sections for a TOPP tool:** + +It takes the obligatory **topp_tool_name** parameter and generates input widgets for each parameter present in the **ini** file (automatically created) except for input and output file parameters. For all input file parameters a widget needs to be created with `self.ui.select_input_file` with an appropriate **key**. For TOPP tool parameters only non-default values are stored. + +**3. Choose `self.ui.input_python` to automatically generate complete input sections for a custom Python tool:** + +Takes the obligatory **script_file** argument. The default location for the Python script files is in `src/python-tools` (in this case the `.py` file extension is optional in the **script_file** argument), however, any other path can be specified as well. Parameters need to be specified in the Python script in the **DEFAULTS** variable with the mandatory **key** and **value** parameters. +""" + ) + + with st.expander( + "Options to use as dictionary keys for parameter definitions (see `src/python-tools/example.py` for an example)" + ): + st.markdown( + """ +**Mandatory** keys for each parameter +- *key:* a unique identifier +- *value:* the default value + +**Optional** keys for each parameter +- *name:* the name of the parameter +- *hide:* don't show the parameter in the parameter section (e.g. for **input/output files**) +- *options:* a list of valid options for the parameter +- *min:* the minimum value for the parameter (int and float) +- *max:* the maximum value for the parameter (int and float) +- *step_size:* the step size for the parameter (int and float) +- *help:* a description of the parameter +- *widget_type:* the type of widget to use for the parameter (default: auto) +- *advanced:* whether or not the parameter is advanced (default: False) +""" + ) + + st.code(getsource(Workflow.configure)) + st.info( + "πŸ’‘ Access parameter widget values by their **key** in the `self.params` object, e.g. `self.params['mzML-files']` will give all selected mzML files." + ) + + with st.expander("**Code documentation**", expanded=True): + st.help(StreamlitUI.input_widget) + st.help(StreamlitUI.select_input_file) + st.help(StreamlitUI.input_TOPP) + st.help(StreamlitUI.input_python) + st.markdown( + """ +## Building the Workflow + +Building the workflow involves **calling all (TOPP) tools** using **`self.executor`** with **input and output files** based on the **`FileManager`** class. For TOPP tools non-input-output parameters are handled automatically. Parameters for other processes and workflow logic can be accessed via widget keys (set in the parameter section) in the **`self.params`** dictionary. + +### FileManager + +The `FileManager` class serves as an interface for unified input and output files with useful functionality specific to building workflows, such as **setting a (new) file type** and **subdirectory in the workflows result directory**. + +Use the **`get_files`** method to get a list of all file paths as strings. + +Optionally set the following parameters modify the files: + +- **set_file_type** (str): set new file types and result subdirectory. +- **set_results_dir** (str): set a new subdirectory in the workflows result directory. +- **collect** (bool): collect all files into a single list. 
Will return a list with a single entry, which is a list of all files. Useful for passing to tools which can handle multiple input files at once.
+"""
+    )
+
+    st.code(
+        """
+# Get all file paths as strings from the self.params entry.
+mzML_files = self.file_manager.get_files(self.params["mzML-files"])
+# mzML_files = ['../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Control.mzML', '../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Treatment.mzML']
+
+# Creating output files for a TOPP tool, setting a new file type and result subdirectory name.
+feature_detection_out = self.file_manager.get_files(mzML_files, set_file_type="featureXML", set_results_dir="feature-detection")
+# feature_detection_out = ['../workspaces-streamlit-template/default/topp-workflow/results/feature-detection/Control.featureXML', '../workspaces-streamlit-template/default/topp-workflow/results/feature-detection/Treatment.featureXML']
+
+# Setting a name for the output directory automatically (useful if you never plan to access these files in the results section).
+feature_detection_out = self.file_manager.get_files(mzML_files, set_file_type="featureXML", set_results_dir="auto")
+# feature_detection_out = ['../workspaces-streamlit-template/default/topp-workflow/results/6DUd/Control.featureXML', '../workspaces-streamlit-template/default/topp-workflow/results/6DUd/Treatment.featureXML']
+
+# Combining all mzML files to be passed to a TOPP tool in a single run. Using "collected" files as argument for self.file_manager.get_files will "un-collect" them.
+mzML_files = self.file_manager.get_files(mzML_files, collect=True)
+# mzML_files = [['../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Control.mzML', '../workspaces-streamlit-template/default/topp-workflow/input-files/mzML-files/Treatment.mzML']]
+    """
+    )
+
+    with st.expander("**Code documentation**", expanded=True):
+        st.help(FileManager.get_files)
+
+    st.markdown(
+        """
+### Running commands
+
+It is possible to execute any command line command using the **`self.executor`** object, either as a single command or as a list of commands run in parallel. Furthermore, a convenience method to run TOPP tools is included.
+
+**1. Single command**
+
+The `self.executor.run_command` method takes a single command as input and optionally logs stdout and stderr to the workflow log (default: True).
+"""
+    )
+
+    st.code(
+        """
+self.executor.run_command(["command", "arg1", "arg2", ...])
+"""
+    )
+
+    st.markdown(
+        """
+**2. Run multiple commands in parallel**
+
+The `self.executor.run_multiple_commands` method takes a list of commands as input.
+
+**3. Run TOPP tools**
+
+The `self.executor.run_topp` method takes a TOPP tool name and a dictionary of input and output files as input. The **keys** need to match the actual input and output parameter names of the TOPP tool. The **values** should be of type `FileManager`. All other **non-default parameters (from input widgets)** will be passed to the TOPP tool automatically.
+
+Depending on the number of input files, the TOPP tool will be run either in parallel or in a single run (using **`FileManager.collect`**).
+"""
+    )
+
+    st.info(
+        """πŸ’‘ **Input and output file order**
+
+In many tools, a single input file is processed to produce a single output file.
+When dealing with lists of input or output files, the convention is that
+files are paired based on their order.
For instance, the n-th input file is
+assumed to correspond to the n-th output file, maintaining a structured
+relationship between input and output data.
+"""
+    )
+    st.code(
+        """
+# e.g. FeatureFinderMetabo takes single input files
+in_files = self.file_manager.get_files(["sample1.mzML", "sample2.mzML"])
+out_files = self.file_manager.get_files(in_files, set_file_type="featureXML", set_results_dir="feature-detection")
+
+# Run the FeatureFinderMetabo tool in parallel for each pair of input/output files.
+self.executor.run_topp("FeatureFinderMetabo", input_output={"in": in_files, "out": out_files})
+# FeatureFinderMetabo -in sample1.mzML -out workspace-dir/results/feature-detection/sample1.featureXML
+# FeatureFinderMetabo -in sample2.mzML -out workspace-dir/results/feature-detection/sample2.featureXML
+
+# Run the SiriusExport tool with multiple input and output files in a single run.
+out_se = self.file_manager.get_files("sirius.ms", set_results_dir="sirius-export")
+self.executor.run_topp("SiriusExport", {"in": self.file_manager.get_files(in_files, collect=True),
+                                        "in_featureinfo": self.file_manager.get_files(out_files, collect=True),
+                                        "out": out_se})
+# SiriusExport -in sample1.mzML sample2.mzML -in_featureinfo sample1.featureXML sample2.featureXML -out sirius.ms
+    """
+    )
+
+    st.markdown(
+        """
+**4. Run custom Python scripts**
+
+Sometimes it is useful to run custom Python scripts, for example for extra functionality which is not included in a TOPP tool.
+
+`self.executor.run_python` works similarly to `self.executor.run_topp`, but takes a single Python script as input instead of a TOPP tool name. The default location for the Python script files is in `src/python-tools` (in this case the `.py` file extension is optional in the **script_file** argument), however, any other path can be specified as well. Input and output file parameters need to be specified in the **input_output** dictionary.
+"""
+    )
+
+    st.code(
+        """
+# e.g. run an example Python tool which modifies mzML files in place based on the experimental design
+self.executor.run_python(script_file="example", input_output={"in": in_mzML, "in_experimantal_design": FileManager(["path/to/experimantal-design.tsv"])})
+    """
+    )
+
+    st.markdown("**Example for a complete workflow section:**")
+
+    st.code(getsource(Workflow.execution))
+
+    with st.expander("**Code documentation**", expanded=True):
+        st.help(CommandExecutor.run_command)
+        st.help(CommandExecutor.run_multiple_commands)
+        st.help(CommandExecutor.run_topp)
+        st.help(CommandExecutor.run_python)
\ No newline at end of file
diff --git a/docs/user_guide.md b/docs/user_guide.md
new file mode 100644
index 000000000..c44d33b5e
--- /dev/null
+++ b/docs/user_guide.md
@@ -0,0 +1,44 @@
+# User Guide
+
+Welcome to the OpenMS Streamlit Web Application! This guide will help you understand how to use our tools effectively.
+
+## Advantages of OpenMS Web Apps
+
+OpenMS web applications provide a user-friendly interface for accessing the powerful features of OpenMS. Here are a few advantages:
+- **Accessibility**: Access powerful OpenMS algorithms and TOPP tools from any device with a web browser.
+- **Ease of Use**: A simplified user interface makes it easy for both beginners and experts to perform complex analyses.
+- **No Installation Required**: Use the tools without the need to install OpenMS locally, saving time and system resources.
+ +## Workspaces + +In the OpenMS web application, workspaces are designed to keep your analysis organized: +- **Workspace Specific Parameters and Files**: Each workspace stores parameters and files (uploaded input files and results from workflows). +- **Persistence**: Your workspaces and parameters are saved, so you can return to your analysis anytime and pick up where you left off. + +## Online and Local Mode Differences + +There are a few key differences between operating in online and local modes: +- **File Uploads**: + - *Online Mode*: You can upload only one file at a time. This helps manage server load and optimizes performance. + - *Local Mode*: Multiple file uploads are supported, giving you flexibility when working with large datasets. +- **Workspace Access**: + - In online mode, workspaces are stored temporarily and will be cleared after seven days of inactivity. + - In local mode, workspaces are saved on your local machine, allowing for persistent storage. + +## Downloading Results + +You can download the results of your analyses, including data, figures and tables, directly from the application: +- **Figures**: Click the camera icon button, appearing while hovering on the top right corner of the figure. Set the desired image format in the settings panel in the side bar. +- **Tables**: Use the download button to save tables in *csv* format, appearing while hovering on the top right corner of the table. +- **Data**: Use the download section in the sidebar to download the raw results of your analysis. + +## Getting Started + +To get started: +1. Select or create a new workspace. +2. Upload your data file. +3. Set the necessary parameters for your analysis. +4. Run the analysis. +5. View and download your results. + +For more detailed information on each step, refer to the specific sections of this guide. \ No newline at end of file diff --git a/win_exe_with_embed_py.md b/docs/win_exe_with_embed_py.md similarity index 95% rename from win_exe_with_embed_py.md rename to docs/win_exe_with_embed_py.md index b7c793251..fb799f99a 100644 --- a/win_exe_with_embed_py.md +++ b/docs/win_exe_with_embed_py.md @@ -100,7 +100,7 @@ Install all required packages from `requirements.txt`: cp app.py ../streamlit_exe ``` -#### πŸš€ After successfully completing all these steps, the Streamlit app will be available by running the run_app.bat file. +#### πŸš€ After successfully completing all these steps, the Streamlit app will be available by running the run_app.bat file. :pencil: You can still change the configuration of Streamlit app with .streamlit/config.toml file, e.g., provide a different port, change upload size, etc. diff --git a/win_exe_with_pyinstaller.md b/docs/win_exe_with_pyinstaller.md similarity index 79% rename from win_exe_with_pyinstaller.md rename to docs/win_exe_with_pyinstaller.md index e66eab07b..5082c3b63 100644 --- a/win_exe_with_pyinstaller.md +++ b/docs/win_exe_with_pyinstaller.md @@ -1,11 +1,11 @@ ## πŸ’» Create a window executable of streamlit app with pyinstaller :heavy_check_mark: -Tested with streamlit v1.29.0, python v3.11.4
+Tested with streamlit v1.29.0, python v3.11.4 -:warning: Support until streamlit version `1.29.0`
+
+:warning: Supported up to Streamlit version `1.29.0`
 :point_right: For a higher version, try the Streamlit app with embeddable Python #TODO add link
-To create an executable for Streamlit app on Windows, we'll use an pyinstaller.
+
+To create an executable for a Streamlit app on Windows, we'll use PyInstaller.
 Here's a step-by-step guide:
 ### virtual environment
@@ -26,7 +26,7 @@ pip install pyinstaller
 ### streamlit files
-create a run_app.py and add this lines of codes
+
+Create a `run_app.py` file and add these lines of code:
 ```
 from streamlit.web import cli
@@ -40,12 +40,14 @@ if __name__=='__main__':
 ### write function in cli.py
-Now, navigate to the inside streamlit environment
-here you go
+Now, navigate into the Streamlit package inside your environment.
+
+You will find the `cli.py` file here:
+
 ```
 \Lib\site-packages\streamlit\web\cli.py
 ```
-for using our virtual environment, add this magic function to cli.py file:
+
+To make use of our virtual environment, add this magic function to the `cli.py` file:
 ```
 # the name can be modified, as given in run_app.py
 # use an underscore at the beginning
 def _main_run_clExplicit(file, command_line, args=[], flag_options=[]):
 ...
 ```
 ### Hook folder
-Now, need to hook to get streamlit metadata
-organized as folder, where the pycache infos will save
-like: \hooks\hook-streamlit.py
+Now we need a hook to collect the streamlit metadata.
+Organize it in its own folder, where the pycache info will be saved,
+e.g.: \hooks\hook-streamlit.py
 ```
 from PyInstaller.utils.hooks import copy_metadata
@@ -80,9 +82,9 @@ pyinstaller --onefile --additional-hooks-dir ./hooks run_app.py --clean
 ```
 ### streamlit config
-To access streamlit config create file in root
-(or just can be in output folder)
-.streamlit\config.toml
+To access the streamlit config, create the file in the root directory
+(or alternatively in the output folder):
+.streamlit\config.toml
 ```
 # content of .streamlit\config.toml
@@ -105,7 +107,8 @@ cp app.py dist/
 ### add datas in run_app.spec (.spec file)
-Add DATAS to the run_app.spec just created by compilation
+Add DATAS to the run_app.spec file that was just created by the compilation:
+
 ```
 datas=[
 ("myenv/Lib/site-packages/altair/vegalite/v4/schema/vega-lite-schema.json","./altair/vegalite/v4/schema/"),
 ...
 ]
 ```
 ### run final step to make executable
-All the modifications in datas should be loaded with
+All the modifications in datas should be loaded by running:
 ```
 pyinstaller run_app.spec --clean
 ```
-#### πŸš€ After successfully completing all these steps, the Windows executable will be available in the dist folder.
+#### πŸš€ After successfully completing all these steps, the Windows executable will be available in the dist folder.
 :pencil: You can still change the configuration of the Streamlit app with the .streamlit/config.toml file, e.g. provide a different port, change the upload size, etc.
 ## Build executable in github action automatically
 Automate the process of building executables for your project with the GitHub action example [Test streamlit executable for Windows with pyinstaller](https://github.com/OpenMS/streamlit-template/blob/main/.github/workflows/test-win-exe-w-pyinstaller.yaml)