diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..b25c15b81fae06e1c55946ac6270bfdb293870e8
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+*~
diff --git a/sample-management-custom/caosdb-server/caosdb-webui/build.properties.d/50.sample-management.conf b/sample-management-custom/caosdb-server/caosdb-webui/build.properties.d/50.sample-management.conf
new file mode 100644
index 0000000000000000000000000000000000000000..185d75ef202aff0b38c806fec8f0a3654be04931
--- /dev/null
+++ b/sample-management-custom/caosdb-server/caosdb-webui/build.properties.d/50.sample-management.conf
@@ -0,0 +1,27 @@
+#
+#
+# Copyright (C) 2024 IndiScale <info@indiscale.com>
+# Copyright (C) 2024 Florian Spreckelsen <f.spreckelsen@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+
+# For nicer rendering of the SSS results
+BUILD_MODULE_EXT_SSS_MARKDOWN=ENABLED
+# Showing id in labels helps for the container/sample id columns
+BUILD_MODULE_SHOW_ID_IN_LABEL=ENABLED
+
+# BIS specific variables
+BUILD_MODULE_EXT_BIS_SAMPLEMANAGEMENT=ENABLED
+BUILD_MODULE_EXT_BIS_STOCKMANAGEMENT=ENABLED
diff --git a/sample-management-custom/caosdb-server/caosdb-webui/src/ext/js/ext_bis_samplemanagemet.js b/sample-management-custom/caosdb-server/caosdb-webui/src/ext/js/ext_bis_samplemanagemet.js
new file mode 100644
index 0000000000000000000000000000000000000000..a84247dadc323329b34dfee5cacc3ee6e2c9d15a
--- /dev/null
+++ b/sample-management-custom/caosdb-server/caosdb-webui/src/ext/js/ext_bis_samplemanagemet.js
@@ -0,0 +1,510 @@
+/*
+ * ** header with license info
+ * ...
+ */
+
+'use strict';
+
+/**
+ * This module adds forms to BIS that relate to sample management.
+ * One form allows registering samples; the other allows updating
+ * registered samples.
+ *
+ * @module ext_bis_samplemanagement
+ * @version 0.1
+ *
+ * @requires $, navbar, log, form_elements, form_panel, bis_custom_reference_resolver
+ * (passed as arguments)
+ */
+const ext_bis_samplemanagement = function($, navbar, log, form_elements, form_panel, bis_custom_reference_resolver) {
+    const logger = log.getLogger("samplemanagement_form");
+    const tool_box = "Sample Management"
+    const upload_sample_template_title = "Upload sample template"; // title of the form and text in the toolbox
+    const upload_sample_template_panel_id = "upload_sample_template_form_panel";
+    const register_new_samples_title = "Register new samples"; // title of the form and text in the toolbox
+    const register_new_samples_panel_id = "register_new_samples_form_panel";
+    const registerNewChildSamplesTitle = "Register new child samples"; // title of the form and text in the toolbox
+    const registerNewChildSamplesPanelId = "register_new_children_form_panel";
+    const upload_pdf_id = "upload_pdf_form_panel";
+    const upload_pdf_title = "Upload PDF file";
+    const export_csv_id = "export_sample_csv";
+    const export_csv_title = "Export Samples to CSV";
+    const accepted_file_formats = [
+        ".csv",
+        ".CSV",
+        "application/csv",
+        "text/csv",
+    ];
+    const required_column_names = [
+        "BIS ID",
+        "Date collected start",
+        "Main User",
+        "Gear",
+        "Gear configuration",
+        "Latitude start",
+        "Longitude start",
+        "Collection",
+        "PI",
+        "Storage ID",
+        "Nagoya case number",
+        "Sample container",
+        "Fixation",
+        "StorageTemperature",
+        "SampleType",
+        "SampleTypeSpecific",
+        "Sphere",
+        "Sample Context",
+    ];
+    const requiredColumnNamesChildren = [
+        "BIS ID",
+        "Date sampled start",
+        "Fixation",
+        "Main User",
+        "Parent BIS ID",
+        "Sample Context",
+        "Sample container",
+        "SampleType",
+        "SampleTypeSpecific",
+        "Sphere",
+        "Storage ID",
+        "StorageTemperature",
+    ];
+    const non_sample_rt_column_names = [
+        "Platform",
+        "Campaign",
+        "Station ID",
+        "Station number",
+        "Subevent",
+        "Time collected start",
+        "Date collected stop",
+        "Time collected stop",
+        "Timezone",
+        "Hol",
+        "Sampling depth start",
+        "Sampling depth stop",
+        "Water depth start",
+        "Water depth stop",
+        "Latitude stop",
+        "Longitude stop",
+        "Taxonomic group/Species",
+        "AphiaID",
+        "On-board sampleID",
+        "Extraction ID",
+        "Duplicate number",
+        "Other fixations",
+        "Notes / remarks",
+        "Forlmaldehyde_box",
+        "OSIS URL",
+        "Ecotaxa URL",
+        "PDFReport",
+        "Date sampled start",
+        "Date sampled stop",
+        "Time sampled start",
+        "Time sampled stop",
+        "Parent BIS ID"
+    ]
+    const allColumnNames = non_sample_rt_column_names.concat(requiredColumnNamesChildren).concat(required_column_names);
+
+    const unused_property_names = [
+        'Container',
+        'Event',
+        "Main User",
+        'NagoyaCase',
+        "Parent Sample",
+        "Person",
+        "SampleType",
+        "Time start",
+        "Time stop"
+    ]
+
+    const upload_sample_template_form_config = {
+        script: "upload_sample_template.py",
+        fields: [{
+            type: "file",
+            name: "sample_template_file",
+            label: "Sample import template",
+            required: true,
+            cached: false,
+            accept: accepted_file_formats.join(","),
+            help: "Select the filled out import template you want to upload."
+        }, ],
+    };
+
+    const export_csv_form_config = {
+        script: "export_sample_csv.py",
+        fields: [{
+            type: "text",
+            name: "bis_ids",
+            label: "BIS IDS",
+            required: false,
+            pattern: "((([0-9]+\\s*-\\s*){0,1}[0-9]+)\\s*,\\s*)*(([0-9]+\\s*-\\s*){0,1}[0-9]+)",
+            cached: false,
+            help: "Comma-separated list of single BIS IDs or ranges, e.g. '124, 126-139, 242'",
+        }, {
+            type: "text",
+            name: "query_string",
+            label: "Query string",
+            help: "As an alternative, specify the samples by pasting a valid select or find query in this field. All samples in the result set will be exported."
+        }, {
+            type: "checkbox",
+            name: "noEmpyColumns",
+            label: "Remove empty columns",
+            help: "Columns that are empty for all exported samples will be deleted from the exported CSV file."
+        }],
+    };
+
+    /**
+     * Reset the column names select field of the register samples form with
+     * the values of the template selected in the template field
+     */
+    const reset_columns_field = async function(form) {
+        var template_field = form_elements.get_fields(form, "register_sample_template")[0];
+        var template_id = $(template_field).find("select:input")[0].value;
+        var template_ent = await transaction.retrieveEntityById(template_id);
+        var names = $(template_ent).find("Property[name='column_names']").find("Value").toArray().map(e => e.textContent);
+        var col_field = form_elements.get_fields(form, "column_names")[0];
+        $(col_field).find(".selectpicker").selectpicker('val', names)
+    };
+
+    /**
+     * The register sample form has some special interaction among fields.
+     * This function creates the form and adds callbacks that facilitate these
+     * interactions.
+     *
+     * @return {HTMLElement} form
+     */
+    const _initRegisterSampleForm = function(requiredColumnNames, showTemplateField) {
+        const register_new_samples_form_config = {
+            script: "register_new_samples.py",
+            fields: [{
+                    type: "reference_drop_down",
+                    name: "responsible_person",
+                    label: "Main user",
+                    query: "FIND RECORD Person",
+                    make_desc: bis_custom_reference_resolver.resolve_person_reference,
+                    required: true
+                },
+                {
+                    type: "integer",
+                    name: "number_of_samples",
+                    label: "Number of Samples",
+                    required: true
+                },
+                //{ type: "checkbox", name: "samples_from_cruise", label: "Samples from a cruise", required: false, help: "When this option is selected the template will include fields for cruise-specific sampling, e.g. the platform, campaign, station and hol."},
+                //{ type: "checkbox", name: "start_and_end_time", label: "Include start and end times", required: false, help: "If this option is selected, the template will include start- and end-field for Date and Time. Else it will only include one date and one time field."},
+                //{ type: "integer", name: "number_of_locations", label: "Positions per Event", required: true, help: "The number of geographic positions you want to include for each sampling event. At least one position is required. If the sampling is stationary one should be enough. But for example, if you are trawling, you can include multiple positions of the trawl."},
+                {
+                    type: "select",
+                    name: "column_names",
+                    label: "Column names:",
+                    options: [],
+                    required: false,
+                    multiple: true
+                },
+            ],
+        };
+        if (showTemplateField) {
+            register_new_samples_form_config.fields = [{
+                type: "reference_drop_down",
+                name: "register_sample_template",
+                label: "Template",
+                query: "FIND RECORD RegisterSampleTemplate",
+                required: false,
+                help: "Templates allow storing frequently used sets of column names.",
+                make_desc: getEntityName
+            }].concat(register_new_samples_form_config.fields);
+        }
+
+        var form = form_elements.make_form(register_new_samples_form_config);
+        if (showTemplateField) {
+            // add callback that resets column names field when template changes
+
+            form_elements.field_ready(
+                    form_elements.get_fields(form, "register_sample_template")[0])
+                .then((field) => ($(field).find(":input").change(() => reset_columns_field(form))))
+                .catch(err => logger.error(err));
+        }
+
+        // refresh the options of the column names select field once the list
+        // is available (the sample RT needs to be retrieved first)
+        var column_name_select = $(form).find("select[name='column_names']");
+        collect_column_names(requiredColumnNames).then(function(names) {
+            for (let option of names) {
+                column_name_select.append(
+                    form_elements._make_option(option.value, option.label));
+            }
+            column_name_select.selectpicker('refresh');
+        }).catch(err => logger.error(err));
+        var col_row = $(form).find(".row[data-field-name='column_names']")[0];
+        col_row.before($(
+            '<div class="row caosdb-f-field caosdb-v-field caosdb-f-form-field-required" data-field-name="required_column_names"><label for="required_column_names" data-property-name="required_column_names" class="col-form-label col-sm-3">Required Columns</label><div class="caosdb-f-property-value col-sm-9"><textarea class="form-control" rows="2" name="required_column_names" required="" disabled>' + requiredColumnNames.join(", ") + '</textarea></div></div>')[0]);
+        return form
+    }
+
+    const initRegisterSampleForm = function() {
+        return _initRegisterSampleForm(required_column_names, true);
+    }
+
+    const initRegisterChildSamplesForm = function() {
+        return _initRegisterSampleForm(requiredColumnNamesChildren, false);
+    }
+
+    const initRegisterTemplateForm = function() {
+        const config = {
+            script: "register_sample_template.py",
+            fields: [{
+                type: "text",
+                name: "template_name",
+                label: "Name of the new sample-registration template",
+                required: true
+            }, {
+                type: "reference_drop_down",
+                name: "main_user",
+                label: "Main user",
+                query: "FIND RECORD Person",
+                make_desc: bis_custom_reference_resolver.resolve_person_reference,
+                required: true
+            }, {
+                type: "select",
+                name: "column_names",
+                label: "Column names",
+                options: [],
+                required: true,
+                multiple: true
+            }]
+        };
+        const form = form_elements.make_form(config);
+
+        // TODO(fspreck): Tidy up to remove code duplications.
+        const column_name_select = $(form).find("select[name='column_names']");
+        collect_column_names(required_column_names).then(names => {
+            for (let option of names) {
+                column_name_select.append(
+                    form_elements._make_option(option.value, option.label)
+                );
+            }
+            column_name_select.selectpicker("refresh");
+        }).catch(err => logger.error(err));
+        var col_row = $(form).find(".row[data-field-name='column_names']")[0];
+        col_row.before($(
+            '<div class="row caosdb-f-field caosdb-v-field caosdb-f-form-field-required" data-field-name="required_column_names"><label for="required_column_names" data-property-name="required_column_names" class="col-form-label col-sm-3">Required Columns (always present)</label><div class="caosdb-f-property-value col-sm-9"><textarea class="form-control" rows="2" name="required_column_names" required="" disabled>' + required_column_names.join(", ") + '</textarea></div></div>')[0]);
+        return form
+    }
+
+    /**
+     * Creates a list of possible column names from a fixed list and from the
+     * properties of the sample RT.
+     *
+     * @return {Array} Options
+     */
+    const collect_column_names = async function(requiredColumnNames) {
+        const a = await query('find recordtype with name=sample');
+        const column_names = $(a[0]).find(".caosdb-property-name").toArray().map(e => e.textContent);
+        var options = [...new Set(allColumnNames.concat(column_names))];
+        options = options.filter(n => !(unused_property_names.includes(n) || requiredColumnNames.includes(n))).sort();
+        return options.map(e => ({
+            value: e,
+            label: e
+        }));
+    }
+
+    const get_pdf_record_type_id = async (name) => {
+        const response = await connection.get("Entity/?query=SELECT id FROM RECORDTYPE " + name);
+        return response.evaluate("/Response/Entity/@id", response, null, XPathResult.STRING_TYPE)?.stringValue;
+    }
+
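+    /**
+     * Create the file-upload widget shown in the "Upload PDF file" form panel.
+     * Only PDF files are accepted; the upload directory is fixed to
+     * "samples/pdfs" and uploads are linked to the PDFReport RecordType.
+     */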
+    const init_pdf_upload = () => {
+        const container = $(`<div class="row"/>`);
+        const config = {
+            directoryBase: "samples/pdfs",
+            directoryReadOnly: true,
+            directoryShow: false,
+            recordTypeShow: false,
+            recordType: get_pdf_record_type_id("PDFReport"),
+            accept: "application/pdf,.pdf",
+            onCancel: (e) => {
+                // e.target.parentNode.form here, because that's how the file
+                // upload library works
+                e.target.parentNode.form.dispatchEvent(form_elements.cancel_form_event);
+            },
+            onFinish: (e) => {
+                // e.target.form here, because that's how our own buttons work
+                e.target.form.dispatchEvent(form_elements.cancel_form_event);
+            },
+        }
+        caosdb_file_upload_widget.create_file_upload_widget(container[0], config);
+        return container[0];
+    }
+
+    /**
+     * Add the sample-management tools to the navbar toolbox. Each entry opens
+     * a form panel, e.g. for uploading a sample template, registering new
+     * samples, or exporting samples to CSV.
+     */
+    const init_show_samplemanagement_panel_button = async function() {
+        //var form_wrapper = form_elements.make_form(config);
+        navbar.add_tool(upload_sample_template_title, tool_box, {
+            callback: form_panel.create_show_form_callback(
+                upload_sample_template_panel_id,
+                upload_sample_template_title,
+                upload_sample_template_form_config)
+        });
+        navbar.add_tool(register_new_samples_title, tool_box, {
+            callback: form_panel.create_show_form_callback(
+                register_new_samples_panel_id,
+                register_new_samples_title,
+                undefined,
+                initRegisterSampleForm)
+        });
+        navbar.add_tool(registerNewChildSamplesTitle, tool_box, {
+            callback: form_panel.create_show_form_callback(
+                registerNewChildSamplesPanelId,
+                registerNewChildSamplesTitle,
+                undefined,
+                initRegisterChildSamplesForm)
+        });
+        navbar.add_tool("Create sample template", tool_box, {
+            callback: form_panel.create_show_form_callback(
+                "create-sample-template",
+                "Create sample template",
+                undefined,
+                initRegisterTemplateForm)
+        });
+        // Set auto_focus=false because of WebUI bug: https://gitlab.com/linkahead/linkahead-webui/-/issues/258
+        navbar.add_tool(upload_pdf_title, tool_box, {
+            callback: form_panel.create_show_form_callback(
+                upload_pdf_id,
+                upload_pdf_title,
+                undefined,
+                init_pdf_upload,
+                false
+            )
+        });
+        const csv_callback = form_panel.create_show_form_callback(
+            export_csv_id,
+            export_csv_title,
+            export_csv_form_config
+        );
+        navbar.add_tool(export_csv_title, tool_box, {
+            callback: (e) => {
+                csv_callback(e);
+                const input_field = document.querySelector(`#${export_csv_id} form`)[0]
+
+                const check_pattern = () => {
+                    input_field.checkValidity();
+                    input_field.reportValidity();
+                }
+                input_field.addEventListener("blur", check_pattern);
+            }
+        });
+    };
+
+    const init_show_igsn_button = function() {
+        const toolBox = "IGSN Tools";
+        const registerButtonName = "Publish IGSN Samples and events";
+        const registerButtonTitle = `
+Set IGSNs and DOIs for SourceEvents that do not yet have them and
+publish them and all connected samples that don't have an active
+embargo.
+`;
+        const registerButtonAnonymousHelp = `
+<p>All events with newly generated DOIs, their samples, and all
+referenced Records apart from Persons and Containers will be made
+visible without login, except if the samples have an active embargo
+(i.e., an embargo date that is not in the past).</p>
+
+<p>Depending on the total number of entities to be updated this way, this
+may take a while and even result in TimeOut errors. Note that despite
+any possible timeout errors, the updates will run in the background.</p>
+                `;
+        const registerScriptFormConfig = {
+            script: "generate_igsn_doi.py",
+            fields: []
+        }
+        const formCreator = () => {
+            const form = form_elements.make_form(registerScriptFormConfig);
+            $(form).find("form").before(registerButtonAnonymousHelp);
+            return form;
+        };
+        const registerIGSNId = "register_igsn_doi"
+        const registerCallback = form_panel.create_show_form_callback(
+            registerIGSNId,
+            registerButtonName,
+            undefined,
+            formCreator
+        );
+        navbar.add_tool(registerButtonName, toolBox, {
+            callback: registerCallback,
+            title: registerButtonTitle
+        });
+
+        // Sample export for IGSN
+        const export_igsn_id = "export_sample_igsn";
+        const export_igsn_title = "Export Samples with IGSN";
+        const export_igsn_form_config = {
+            script: "export_sample_csv.py",
+            fields: [{
+                type: "date",
+                name: "from_date",
+                label: "From Date",
+                required: true,
+                cached: false,
+                help: "Samples that were inserted after this date are included",
+            }, {
+                type: "text",
+                name: "bis_ids",
+                label: "BIS IDS",
+                required: false,
+                pattern: "((([0-9]+\\s*-\\s*){0,1}[0-9]+)\\s*,\\s*)*(([0-9]+\\s*-\\s*){0,1}[0-9]+)",
+                cached: false,
+                help: "Comma-separated list of single BIS IDs or ranges, e.g. '124, 126-139, 242'",
+            }],
+        };
+
+
+
+        const igsn_callback = form_panel.create_show_form_callback(
+            export_igsn_id,
+            export_igsn_title,
+            export_igsn_form_config
+        );
+        navbar.add_tool(export_igsn_title, "IGSN Tools", {
+            callback: (e) => {
+                igsn_callback(e);
+                $("[data-field-name=bis_ids]").hide()
+            }
+        });
+
+    }
+
+
+    const is_authorized = function() {
+        // return true //for testing
+        return isAuthenticated() //&& (userHasRole("stockmanager") || userHasRole("administration"))
+    }
+
+
+    const init = function() {
+        if (is_authorized()) {
+            init_show_samplemanagement_panel_button();
+        }
+        if (userHasRole("Curator") || userHasRole("administration")) {
+            init_show_igsn_button();
+        }
+    }
+
+
+    /* the main function must return the initialization of the module */
+    return {
+        init: init,
+    };
+}($, navbar, log, form_elements, form_panel, bis_custom_reference_resolver);
+
+// this will be replaced by require.js in the future.
+$(document).ready(function() {
+    // use a variable starting with `BUILD_MODULE_` to enable your module
+    // the build variable has to be enabled in the `build.properties.d/` directory.
+    // Otherwise the module will not be activated.
+    if ("${BUILD_MODULE_EXT_BIS_SAMPLEMANAGEMENT}" === "ENABLED") {
+        caosdb_modules.register(ext_bis_samplemanagement);
+    }
+});
diff --git a/sample-management-custom/caosdb-server/caosdb-webui/src/ext/js/ext_bis_stockmanagement.js b/sample-management-custom/caosdb-server/caosdb-webui/src/ext/js/ext_bis_stockmanagement.js
new file mode 100644
index 0000000000000000000000000000000000000000..b8630a40836d7bc971ffe749af898d0f583cebf1
--- /dev/null
+++ b/sample-management-custom/caosdb-server/caosdb-webui/src/ext/js/ext_bis_stockmanagement.js
@@ -0,0 +1,216 @@
+/*
+ * ** header with license info
+ * ...
+ */
+
+'use strict';
+
+/**
+ * @module ext_bis_stockmanagement
+ * @version 0.2
+ *
+ * @requires($, navbar, log, form_elements, form_panel, bis_custom_reference_resolver)
+ */
+const ext_bis_stockmanagement = function ($, navbar, log, form_elements, form_panel, bis_custom_reference_resolver) {
+
+    const tool_box = "Storage Management" // The item that is shown in the top-navbar
+
+    ///////////////// REGISTER NEW CONTAINERS START
+    const register_new_containers_title = "Register new Containers";
+    const register_new_containers_panel_id = "register_new_containers_form_panel";
+    const register_new_containers_form_config = {
+        // the script can be found in profiles/default/custom/caosdb-server/scripting/bin/
+        // it is called like ./register_new_containers.py form.json where form.json
+        // is a JSON dictionary containing a "name: value" pair for each form element.
+
+        // NOTE: The script has to be executable ("chmod +x scriptname")
+        // Else the server will respond with 400: BadRequest
+
+        // NOTE: The she-bang (#!/usr/bin/env python3) in line 1 of the script is obligatory!
+        // Else there will be syntax errors
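+        // As an illustration (hypothetical values), the form.json passed to
+        // the script could look like:
+        //   {"responsible_entity": "1234", "container_type": "5678",
+        //    "container_size": "81 spaces", "number_of_containers": "10",
+        //    "file_format": "csv", "parent_id": "4321"}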
+        script: "register_new_containers.py",
+        fields: [{
+                type: "reference_drop_down",
+                name: "responsible_entity",
+                label: "Responsible entity",
+                query: "FIND RECORD Responsible",
+                make_desc: getEntityName,
+                required: true
+            },
+            {
+                type: "reference_drop_down",
+                name: "container_type",
+                label: "Container type",
+                query: "FIND RECORD ContainerType",
+                make_desc: getEntityName,
+                required: true
+            },
+            {
+                type: "text",
+                name: "container_size",
+                label: "Container size",
+                help: "A text describing the container size, e.g. '81 spaces' or '20*20*10 cm^3'."
+            },
+            {
+                type: "integer",
+                name: "number_of_containers",
+                label: "Number of containers",
+                required: true
+            },
+            {
+                type: "select",
+                name: "file_format",
+                label: "Template file format",
+                options: [{
+                        value: "csv",
+                        label: "Comma separated file (.csv)"
+                    },
+                    // { value: "xlsx", label: "Microsoft Excel spreadsheet (.xlsx)" }, // TODO: implement
+                    // { value: "ods", label: "OpenDocument Spreadsheet (.ods)" }, // TODO: implement
+                ],
+                value: "csv",
+                required: true
+            },
+            {
+                type: "reference_drop_down",
+                name: "parent_id",
+                label: "Parent container",
+                query: "FIND RECORD Container",
+                make_desc: bis_custom_reference_resolver.resolve_default,
+                required: false
+            },
+        ],
+    };
+    ///////////////// REGISTER NEW CONTAINERS END
+
+    ///////////////// UPDATE CONTAINERS START
+    const update_containers_title = "Update Containers";
+    const update_containers_panel_id = "update_containers_form_panel";
+    const accepted_file_formats = [
+        ".csv",
+        ".CSV",
+        "application/csv",
+        "text/csv",
+    ]
+    const update_containers_form_config = {
+        script: "update_containers.py",
+        fields: [{
+            type: "file",
+            name: "container_metadata_file",
+            label: "Container metadata spreadsheet",
+            required: true,
+            cached: false,
+            accept: accepted_file_formats.join(","),
+            help: "Select the filled out metadata spreadsheet you want to upload."
+        }, ],
+    };
+    ///////////////// UPDATE CONTAINERS END
+
+    ///////////////// EXPORT CONTAINER CSV START
+    const export_csv_id = "export_container_csv";
+    const export_csv_title = "Export existing containers to CSV";
+
+    const export_csv_form_config = {
+        script: "export_container_csv.py",
+        fields: [{
+            type: "text",
+            name: "bis_ids",
+            label: "BIS IDS",
+            required: true,
+            pattern: "((([0-9]+\\s*-\\s*){0,1}[0-9]+)\\s*,\\s*)*(([0-9]+\\s*-\\s*){0,1}[0-9]+)",
+            cached: false,
+            help: "Comma-separated list of single BIS IDs or ranges, e.g. '124, 126-139, 242'"
+        }]
+    };
+    ///////////////// EXPORT CONTAINER CSV END
+
+
+    /**
+     * Add a dropdown labelled "Storage Management" to the top-navbar with
+     * entries for container registration, for uploading filled-in container
+     * forms, and for CSV export.
+     */
+    const init_show_stockmanagement_panel_buttons = function () {
+        create_register_new_containers_button();
+        create_update_containers_button();
+
+        const csv_callback = form_panel.create_show_form_callback(
+            export_csv_id,
+            export_csv_title,
+            export_csv_form_config
+        );
+        navbar.add_tool(export_csv_title, tool_box, {
+            callback: (e) => {
+                csv_callback(e);
+                const input_field = document.querySelector(`#${export_csv_id} form`)[0]
+
+                // TODO move auto focus to form_panel module
+                input_field.focus();
+
+                // TODO move pattern to form_elements module
+                input_field.setAttribute("pattern", export_csv_form_config.fields[0].pattern);
+
+                const check_pattern = () => {
+                    input_field.checkValidity();
+                    input_field.reportValidity();
+                }
+                input_field.addEventListener("blur", check_pattern);
+            }
+        });
+    };
+
+    const create_register_new_containers_button = function () {
+        navbar.add_tool(register_new_containers_title, tool_box, {
+            callback: form_panel.create_show_form_callback(
+                register_new_containers_panel_id,
+                register_new_containers_title,
+                register_new_containers_form_config)
+        });
+    };
+
+    const create_update_containers_button = function () {
+        navbar.add_tool(update_containers_title, tool_box, {
+            callback: form_panel.create_show_form_callback(
+                update_containers_panel_id,
+                update_containers_title,
+                update_containers_form_config)
+        });
+    };
+
+    const logger = log.getLogger("stockmanagement_form");
+
+    /*
+     * Check if the user is authenticated and has the Scientist, Stock
+     * manager, Curator, or administration role.
+     */
+    const is_authorized = function () {
+        return isAuthenticated() && (userHasRole("Scientist") || userHasRole("Stock manager") || userHasRole("administration") || userHasRole("Curator"));
+    }
+
+    /*
+     * Initialization of the module. Adds the "Storage Management" button to
+     * the top-navbar if is_authorized() returns true, i.e. the user is
+     * authenticated and has one of the required roles.
+     */
+    const init = function () {
+        if (is_authorized()) {
+            init_show_stockmanagement_panel_buttons();
+        }
+    }
+
+    /* the main function must return the initialization of the module */
+    return {
+        init: init,
+    };
+}($, navbar, log, form_elements, form_panel, bis_custom_reference_resolver); // this will be replaced by require.js in the future.
+
+$(document).ready(function () {
+    // use a variable starting with `BUILD_MODULE_` to enable this module
+    // the build variable has to be enabled in the `build.properties.d/` directory.
+    // Otherwise the module will not be activated.
+    if ("${BUILD_MODULE_EXT_BIS_STOCKMANAGEMENT}" === "ENABLED") {
+        caosdb_modules.register(ext_bis_stockmanagement);
+    }
+});
+// const form = form_elements.make_form(config);
+// $("body").append(form);
diff --git a/sample-management-custom/caosdb-server/scripting/bin/bis_utils.py b/sample-management-custom/caosdb-server/scripting/bin/bis_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..d70503062881b0df7701fff1d7e6b6fc936a6921
--- /dev/null
+++ b/sample-management-custom/caosdb-server/scripting/bin/bis_utils.py
@@ -0,0 +1,310 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2024 Indiscale GmbH <info@indiscale.com>
+# Copyright (C) 2024 Florian Spreckelsen <f.spreckelsen@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+
+import os
+import pandas as pd
+import re
+
+from urllib.parse import urljoin
+
+import linkahead as db
+
+from caosadvancedtools.serverside.helper import send_mail
+from caoscrawler.config import get_config_setting
+from linkahead import get_entity_by_name
+
+SPECIAL_TREATMENT_SAMPLE = [
+    "BIS ID",
+    "Collection",
+    "Date collected start",
+    "Date collected stop",
+    "Date sampled start",
+    "Date sampled stop",
+    "Gear configuration",
+    "Gear",
+    "Hol",
+    "Latitude start",
+    "Latitude stop",
+    "Longitude start",
+    "Longitude stop",
+    "Main User",
+    "Nagoya case number",
+    "PDFReport",
+    "PI",
+    "Parent BIS ID",
+    "Person",
+    "Sampling Person",
+    "Sampling depth start",
+    "Sampling depth stop",
+    "Sphere",
+    "Station ID",
+    "Station number",
+    "Storage Container Label",
+    "Storage ID",
+    "Subevent",
+    "Time collected start",
+    "Time collected stop",
+    "Time sampled start",
+    "Time sampled stop",
+    "Timezone",
+    "Water depth start",
+    "Water depth stop",
+]
+
+IGNORED_COLUMN_NAMES_SAMPLE = [
+    "BIS URL",
+    "Date",
+    "IGSN URL",
+    "IGSN",  # TODO This will be relevant for external IGSNs in the future.
+    "Parent Sample",
+    "Sampling depth",
+    "Storage chain",
+    "Water depth",
+]
+
+
+# This can be used for hard-coded descriptions
+COLUMN_DESCRIPTIONS = {
+    "BIS ID": "An ID generated by the BIS (either integer or URL to this entity). Do not change this column!",
+    "BIS label": "The label automatically assigned by the BIS. Do not change this column.",
+    "Collection": "Collection(s) to which this container or samples belongs (separated by ';')",
+    "Container Contents": "Notes concerning the contents of this container",
+    "Container size": "The size of the container (e.g. 81 spaces)",
+    "Container type": "The type of container. Do not change this column.",
+    "Custom label": "Any additional custom label you put on the container",
+    "Date collected start": "The start date in format YYYY-MM-DD",
+    "Date collected stop": "The end date in format YYYY-MM-DD",
+    "Gear configuration": "How gear is configured, mesh size of the net, filter pore size, what kind of trawl",
+    "Latitude start": "Latitude where sampling started; in decimal degrees, use \".\" as decimal sign, use - for S",
+    "Latitude stop": "Latitude where sampling ended; in decimal degrees, use \".\" as decimal sign, use - for S",
+    "Longitude start": "Longitude where sampling started; in decimal degrees, use \".\" as decimal sign, use - for W",
+    "Longitude stop": "Longitude where sampling ended; in decimal degrees, use \".\" as decimal sign, use - for W",
+    "Notes / remarks": "Field for notes",
+    "PDFReport": "A PDF containing additional information on the container or sample contents",
+    "PI": "The PI (by abbreviation) of the container or sample",
+    "Parent container": "The BIS ID (integer or URL) or name or BIS label of the container where this container is stored.",
+    "Sampling depth start": "The depth in meters where the sampling started as a positive value, not bottom depth",
+    "Sampling depth stop": "The depth in meters where the sampling ended as a positive value, not bottom depth",
+    "Storage ID": "BIS ID of the Container that holds this sample",
+    "Subevent": "Subevent given by D-Ship in Stationlog, e.g. AL123_14-4",
+    "Timezone": "Timezone: Either UTC, CET, .... or in the form +hh[:mm], -hh:[mm].",
+    "Water depth start": "The bottom depth in meters where the sampling started as a positive value",
+    "Water depth stop": "The bottom depth in meters where the sampling ended as a positive value",
+}
+
+
+def get_do_not_insert_type_names(override_names: list[str] = []):
+    """Return all names of RecordTypes with parent
+    ``ControlledRecordType``, the name of which is not included in
+    ``override_names``.
+
+    """
+    # To lower-case for case-insensitivity
+    overrides_lower = [name.lower() for name in override_names]
+    do_not_inserts = [rt.name for rt in db.execute_query(
+        "SELECT name FROM RECORDTYPE ControlledRecordType")]
+
+    return [name for name in do_not_inserts if name.lower() not in overrides_lower]
+
+
+def get_email_from_username(suffix: str = "@geomar.de"):
+    """Return the email address for a given username. For now, just
+    `username+suffix`.
+
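+    A hypothetical example: for the logged-in user ``jdoe`` and the default
+    suffix, the result is ``jdoe@geomar.de``.
+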
+    """
+    # connection.get_username doesn't work with auth tokens, so use the user name.
+    username = db.Info().user_info.name
+    return f"{username}{suffix}"
+
+
+def create_email_with_link_text(ftype: str, fpath_within_shared: str):
+    """Return a standard email body text stating the type of download and the
+    link to the file.
+
+    Parameters
+    ----------
+    ftype : str
+        Type of the download, e.g., "Sample export" or "Sample registration".
+    fpath_within_shared : str
+        Relative path of the file to be downloaded w.r.t. the SHARED directory.
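+
+    A hypothetical example: with ``public_host_url = "https://bis.example.org"``
+    and ``fpath_within_shared = "exports/samples.csv"``, the download link in
+    the mail body is ``https://bis.example.org/Shared/exports/samples.csv``.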
+    """
+
+    public_host_url = get_config_setting("public_host_url")
+    full_link = urljoin(public_host_url, os.path.join("/Shared", fpath_within_shared))
+
+    body_text = f"""
+Hello,
+
+Your {ftype} is ready for download.  You can download it here:
+
+{full_link}
+
+Note that this link will become invalid after a BIS server restart.
+    """
+
+    return body_text
+
+
+def send_mail_with_defaults(**kwargs):
+    """Thin wrapper for caosadvancedtools.serverside.helper.send_mail that fills
+    some arguments with reasonable defaults: `from_addr` and `to` default to the
+    crawler's sendmail config if not specified.
+
+    """
+    if "from_addr" not in kwargs:
+        kwargs["from_addr"] = get_config_setting("sendmail_from_address")
+    if "to" not in kwargs:
+        kwargs["to"] = get_config_setting("sendmail_to_address")
+    if "cc" not in kwargs:
+        # If none is specified, CC curator if curator is not in the recipients
+        # already.
+        curator_addr = get_config_setting("sendmail_to_address")
+        if isinstance(kwargs["to"], list):
+            if curator_addr not in kwargs["to"]:
+                kwargs["cc"] = curator_addr
+        elif kwargs["to"] != curator_addr:
+            kwargs["cc"] = curator_addr
+
+    send_mail(**kwargs)
+
+
+def replace_entity_urls_by_ids(data: pd.DataFrame, eid_columns: list[str] = ["BIS ID", "Parent container"]):
+    """Replace all entity urls in the relevant columns `BIS ID` and `Parent
+    container` by their entity id.
+
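+    A minimal doctest-style sketch (the URL is hypothetical):
+
+    >>> import pandas as pd
+    >>> df = pd.DataFrame({"BIS ID": ["https://bis.example.org/Entity/1234"]})
+    >>> replace_entity_urls_by_ids(df)["BIS ID"][0]
+    '1234'
+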
+    """
+    entity_url_pattern = r"^http(s)?:\/\/(.*)?\/(E|e)ntity\/(?P<eid>(.*))$"
+    for index, row in data.iterrows():
+        for cname in eid_columns:
+            if cname in row:
+                matches = re.match(entity_url_pattern, str(row[cname]))
+                if matches:
+                    data.at[index, cname] = matches.groupdict()["eid"]
+
+    return data
+
+
+def return_value_if_not_none(val):
+    """Workaround for somewhat inconsistent pandas behavior: return ``val`` if
+    it is an actual value; otherwise (NaN, None, empty string, empty list)
+    return None.
+
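+    Illustrative doctest-style examples:
+
+    >>> return_value_if_not_none(float("nan")) is None
+    True
+    >>> return_value_if_not_none("") is None
+    True
+    >>> return_value_if_not_none(42)
+    42
+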
+    """
+    if isinstance(val, list):
+        if len(val) == 0:
+            return None
+        if len(val) == 1:
+            if pd.isnull(val[0]) or val[0] is None or f"{val[0]}".lower() == "nan" or f"{val[0]}" == "":
+                return None
+    elif pd.isnull(val) or val is None or f"{val}".lower() == "nan" or f"{val}" == "":
+        return None
+    return val
+
+
+def whitespace_cleanup_in_df(df: pd.DataFrame):
+    """Strip all leading and trailing whitespaces from all str values in df."""
+
+    for col_name in df.columns:
+        if pd.api.types.is_string_dtype(df[col_name].dtype):
+            df[col_name] = df[col_name].str.strip()
+
+    return df
+
+
+def get_description_row(column_names: list[str]):
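+    """Return one description per column name, suitable as a CSV comment row.
+
+    Descriptions are taken from ``COLUMN_DESCRIPTIONS`` or, as a fallback, from
+    the description of a Property or RecordType with the same name.  Values
+    containing commas are quoted, and the first cell is prefixed with '#'.
+    """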
+
+    descriptions = []
+    for name in column_names:
+        descr = ""
+        if name in COLUMN_DESCRIPTIONS:
+            # Simple: Hard coded
+            descr = COLUMN_DESCRIPTIONS[name]
+        else:
+            # Try properties first
+            cand = db.execute_query(f"FIND PROPERTY WITH name='{name}'")
+            if len(cand) == 0:
+                # Try RecordTypes
+                cand = db.execute_query(f"FIND RECORDTYPE WITH name='{name}'")
+            if len(cand) == 1 and cand[0].description is not None:
+                descr = cand[0].description
+        if ',' in descr:
+            descr = f"\"{descr}\""
+        descriptions.append(descr)
+
+    if descriptions:
+        descriptions[0] = f"#{descriptions[0]}"
+
+    return descriptions
+
+
+def get_options_row(column_names: list[str], override_names: list[str] = []):
+    """Return a list of possible options for all column names which
+    correspond to controlled RecordTypes.
+
+    """
+
+    controlled_rts = get_do_not_insert_type_names(override_names)
+    # We will need special treatment for gears, but only if they are
+    # in the column_names
+    gears_and_configs = {}
+    if "Gear" in column_names:
+        gears_recs = db.execute_query("SELECT Parent, Configuration FROM RECORD Gear")
+        for rec in gears_recs:
+            # We ignore the case of multiple parents for gears for now.
+            gear_name = rec.parents[0].name
+            if rec.get_property("Configuration") is not None:
+                config = rec.get_property("Configuration").value
+            else:
+                config = None
+            if gear_name in gears_and_configs:
+                gears_and_configs[gear_name].append(config)
+            else:
+                gears_and_configs[gear_name] = [config]
+
+    options = []
+    for name in column_names:
+        if name.lower() == "gear":
+            option = ";".join(list(gears_and_configs.keys()))
+        elif name.lower() == "gear configuration":
+            option = ";".join([f"{key}:{val}" for key, val in gears_and_configs.items()])
+        elif name in controlled_rts:
+            rt = get_entity_by_name(name, role="RECORDTYPE")
+            if len(rt.properties) == 0:
+                # Records are identified by name
+                recs = db.execute_query(f"SELECT name FROM RECORD '{name}'")
+                option = ";".join([rec.name for rec in recs if rec.name is not None])
+            else:
+                # We use the first property (should be only) as identifier.
+                pname = rt.properties[0].name
+                recs = db.execute_query(f"SELECT '{pname}' FROM RECORD '{name}'")
+                non_empty_values = [rec.get_property(
+                    pname).value for rec in recs if rec.get_property(pname).value is not None]
+                option = ";".join(non_empty_values)
+        else:
+            option = ""
+        if ',' in option:
+            option = f"\"{option}\""
+        options.append(option)
+
+    if options:
+        options[0] = f"#{options[0]}"
+
+    return options
diff --git a/sample-management-custom/caosdb-server/scripting/bin/crawl_sample_data_async.py b/sample-management-custom/caosdb-server/scripting/bin/crawl_sample_data_async.py
new file mode 100755
index 0000000000000000000000000000000000000000..1c578f9e6e6b56c53805857996b13b90f4199e4b
--- /dev/null
+++ b/sample-management-custom/caosdb-server/scripting/bin/crawl_sample_data_async.py
@@ -0,0 +1,708 @@
+#!/usr/bin/env python3
+#
+# This file is a part of the LinkAhead Project.
+#
+# Copyright (C) 2024 GEOMAR
+# Copyright (C) 2024 Indiscale GmbH <info@indiscale.com>
+# Copyright (C) 2024 Florian Spreckelsen <f.spreckelsen@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public
+# License along with this program. If not, see
+# <https://www.gnu.org/licenses/>.
+#
+
+import logging
+import os
+import pandas as pd
+import re
+import sys
+
+from dateutil import parser as dateparser
+from pickle import UnpicklingError
+
+import linkahead as db
+
+from caosadvancedtools.datainconsistency import DataInconsistencyError
+from caosadvancedtools.serverside import helper
+from caoscrawler import Crawler, SecurityMode
+from caoscrawler.crawl import ForbiddenTransaction, _notify_about_inserts_and_updates
+from caoscrawler.exceptions import ImpossibleMergeError
+from caoscrawler.identifiable_adapters import CaosDBIdentifiableAdapter
+from caoscrawler.logging import configure_server_side_logging
+from linkahead.cached import cached_get_entity_by
+from linkahead.common.datatype import get_id_of_datatype
+
+from bis_utils import (get_do_not_insert_type_names,
+                       IGNORED_COLUMN_NAMES_SAMPLE,
+                       return_value_if_not_none,
+                       send_mail_with_defaults,
+                       SPECIAL_TREATMENT_SAMPLE)
+
+# suppress warnings of the diff function
+apilogger = logging.getLogger("linkahead.apiutils")
+apilogger.setLevel(logging.ERROR)
+
+logger = logging.getLogger("caosadvancedtools")
+
+
+def _notify_about_error(text, subject):
+
+    logger.error(text)
+    send_mail_with_defaults(subject=subject, body=text)
+
+
+def _is_ignored_column_name(name, parent_suffix="_parent"):
+
+    return name in IGNORED_COLUMN_NAMES_SAMPLE or name.endswith(parent_suffix)
+
+
+def _update_property(entity: db.Record, property_id: int, value, property_name="", datatype=None):
+    """
+    Set the property of an entity.
+
+    If the entity already has the property, only the value is set.
+    Otherwise, the property is added to the entity.
+    """
+    # TODO: Replace by assure_property_is etc.
+
+    # If the value in the spreadsheet is empty (nan)
+    if ((isinstance(value, list) and len(value) == 0)
+            or (not isinstance(value, list) and pd.isna(value))):
+        # Remove the property from the Entity if it has it
+        try:
+            entity.get_properties().get_by_name(property_name)
+            entity.remove_property(property_name)
+        except KeyError:
+            pass
+        return entity
+    if entity.get_property(property_id) is None:
+        if datatype:
+            entity.add_property(id=property_id, value=value, name=property_name, datatype=datatype)
+        else:
+            entity.add_property(id=property_id, value=value, name=property_name)
+        logger.debug("{}: Adding {} = {}".format(entity.id, property_id, value.id if
+                                                 isinstance(value, db.Entity) else value))
+    else:
+        if isinstance(value, list) and not entity.get_property(property_id).datatype.startswith("LIST"):
+            entity.get_property(property_id).datatype = db.LIST(
+                entity.get_property(property_id).datatype)
+        entity.get_property(property_id).value = value
+        logger.debug("{}: Setting {} = {}".format(entity.id, property_id, value.id if
+                                                  isinstance(value, db.Entity) else value))
+    return entity
+
+
+def _treat_date(date_val: str):
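+    """Check that ``date_val`` has the form YYYY-MM-DD (e.g. "2024-03-07") and
+    return it in normalized form; raise ``dateparser.ParserError`` otherwise."""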
+
+    date_pattern = r"^[0-9]{4,4}-[0-9]{2,2}-[0-9]{2,2}$"
+    # Check general pattern since dateutil.parse is unreliable with incomplete
+    # dates (e.g., 2024-01) or wrong formats (e.g., 01.12.2024 is parsed as
+    # 2024-01-12).
+    if re.match(date_pattern, date_val) is None:
+        # ParserError for simplified error handling down the line.
+        raise dateparser.ParserError(f"{date_val} is not of the format YYYY-MM-DD.")
+    # Use dateutils.parser despite having checked the pattern to exclude
+    # nonsense dates like 2024-13-54.
+    return str(dateparser.parse(date_val).date())
+
+
+def _append_times_to_entity(ent, data, propname_prefix="Time", colname_time_prefix="Time collected", colname_date_prefix="Date collected"):
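+    """Set the "<propname_prefix> start/stop" properties of ``ent`` from the
+    date, time and timezone columns in ``data``.
+
+    Raises a ``DataInconsistencyError`` if a date is malformed or if a time is
+    given without the corresponding date or timezone.
+    """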
+    propname_start = f"{propname_prefix} start"
+    propname_stop = f"{propname_prefix} stop"
+    prop_start = cached_get_entity_by(name=propname_start)
+    prop_stop = cached_get_entity_by(name=propname_stop)
+    colname_time_start = f"{colname_time_prefix} start"
+    colname_time_stop = f"{colname_time_prefix} stop"
+    colname_date_start = f"{colname_date_prefix} start"
+    colname_date_stop = f"{colname_date_prefix} stop"
+
+    date_start = None
+    date_stop = None
+    time_start = None
+    time_stop = None
+    timezone = None
+    # Time start
+    if colname_date_start in data and return_value_if_not_none(data[colname_date_start]) is not None:
+        date_start = return_value_if_not_none(data[colname_date_start])
+        try:
+            date_start = _treat_date(date_start)
+        except dateparser.ParserError as perr:
+            logger.error(
+                f"There is a problem in '{colname_date_start}': {date_start}"
+                f" of sample {data['BIS ID']}: {perr}"
+            )
+            raise DataInconsistencyError
+    if colname_time_start in data and return_value_if_not_none(data[colname_time_start]) is not None:
+        if not "Timezone" in data or return_value_if_not_none(data["Timezone"]) is None:
+            logger.error(f"{colname_time_start} but no timezone given for sample "
+                         f"{data['BIS ID']}.")
+            raise DataInconsistencyError
+        time_start = return_value_if_not_none(data[colname_time_start])
+        timezone = return_value_if_not_none(data["Timezone"])
+        if date_start is None:
+            logger.error(
+                f"{colname_time_start} is given but {colname_date_start} is missing for sample {data['BIS ID']}.")
+            raise DataInconsistencyError
+        try:
+            _val = str(dateparser.parse(f"{date_start}T{time_start}{timezone}"))
+            ent = _update_property(ent, prop_start.id, property_name=prop_start.name, value=_val)
+        except dateparser.ParserError as perr:
+            logger.error(
+                f"Couldn't parse {colname_time_start}: {time_start} with timezone {timezone} "
+                f"of sample {data['BIS ID']}: {perr}"
+            )
+            raise DataInconsistencyError
+    elif date_start is not None:
+        ent = _update_property(ent, prop_start.id, value=date_start, property_name=prop_start.name)
+
+    # Time stop; raise error in case of stop without start
+    if colname_date_stop in data and return_value_if_not_none(data[colname_date_stop]) is not None:
+        date_stop = return_value_if_not_none(data[colname_date_stop])
+    if date_stop is not None and date_start is None:
+        logger.error(
+            f"{colname_date_stop} is given but {colname_date_start} is missing for sample {data['BIS ID']}.")
+        raise DataInconsistencyError
+    if date_stop is None:
+        _date_stop = date_start
+    else:
+        try:
+            _date_stop = _treat_date(date_stop)
+        except dateparser.ParserError as perr:
+            logger.error(
+                f"There is a problem in '{colname_date_stop}': {date_stop}"
+                f" of sample {data['BIS ID']}: {perr}"
+            )
+            raise DataInconsistencyError
+
+    if colname_time_stop in data and return_value_if_not_none(data[colname_time_stop]) is not None:
+        time_stop = return_value_if_not_none(data[colname_time_stop])
+        if time_start is None:
+            logger.error(
+                f"{colname_time_stop} is given but {colname_time_start} is missing for sample {data['BIS ID']}.")
+            raise DataInconsistencyError
+        # timezone is set by time start; if it hadn't been there, we would already have an error.
+        try:
+            _val = str(dateparser.parse(f"{_date_stop}T{time_stop}{timezone}"))
+        except dateparser.ParserError as perr:
+            logger.error(
+                f"Couldn't parse {colname_time_stop}: {time_stop} with timezone {timezone} "
+                f"of sample {data['BIS ID']}: {perr}"
+            )
+            raise DataInconsistencyError
+        ent = _update_property(ent, prop_stop.id, property_name=prop_stop.name, value=_val)
+    elif date_stop is not None:
+        # We check date_stop but we used the cleaned-up _date_stop as value
+        ent = _update_property(ent, prop_stop.id, property_name=prop_stop.name, value=_date_stop)
+
+    return ent
+
+
+def get_container(data):
+    """
+    Return the Container Record that is identified by 'Storage ID' (or 'Storage Container Label') in data.
+    A Container can either be identified via a BIS ID or via a BIS Label.
+
+    If no Container can be identified, an Error is raised, since creating/registering new
+    Containers has to be done before registering samples.
+    """
+    identified_by_label = False
+    container_identifier = data["Storage ID"]
+    # If the ID is not specified, try to get the label
+    if "Storage Container Label" in data and pd.isnull(container_identifier):
+        container_identifier = data["Storage Container Label"]
+        identified_by_label = True
+
+    if identified_by_label:
+        container = _get_container_by_label(container_identifier)
+    else:
+        container = _get_container_by_id(container_identifier)
+
+    if container is not None:
+        return container
+    else:
+        msg = "Container: '{}' could not be identified.".format(container_identifier)
+        raise DataInconsistencyError(msg)
+
+
+def _get_container_by_id(id):
+    res = db.execute_query("FIND RECORD Container WITH id = '{}'".format(id))
+    if len(res) > 0:
+        return res[0]
+    else:
+        return None
+
+
+def _get_container_by_label(label):
+    res = db.execute_query("FIND RECORD Container WITH 'BIS Label' = '{}'".format(label))
+    if len(res) > 0:
+        return res[0]
+    else:
+        return None
+
+
+def get_event(data, gear_id):
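+    """Create a SourceEvent Record from ``data``, named after the 'Subevent'
+    column if present, with times, gear and positions attached."""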
+    # Events only have names if they have the subevent property.
+    if "Subevent" in data and return_value_if_not_none(data["Subevent"]) is not None:
+        event_name = f"{data['Subevent']}"
+        return _create_new_source_event(event_name, data, gear_id)
+
+    return _create_new_source_event(name=None, data=data, gear_id=gear_id)
+
+
+def _create_new_source_event(name, data, gear_id) -> db.Record:
+    event = db.Record(name)
+    event.add_parent("SourceEvent")
+    event = _append_times_to_entity(event, data)
+
+    event.add_property(name="Gear", value=gear_id)
+    event.add_property(name="Position", value=_get_positions(
+        data), datatype=db.common.datatype.LIST("Position"))  # type: ignore
+    if "Station ID" in data and return_value_if_not_none(data["Station ID"]) is not None:
+        event.add_property(name="Station ID", value=str(data["Station ID"]))
+    if "Station number" in data and return_value_if_not_none(data["Station number"]) is not None:
+        event.add_property(name="Station number", value=str(data["Station number"]))
+    if "Hol" in data and return_value_if_not_none(data["Hol"]) is not None:
+        event.add_property(name="Hol", value=str(data["Hol"]))
+    return event
+
+
+def _get_positions(data):
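+    """Return a list with a StartPosition Record and, if stop coordinates or
+    stop depths are given, an additional StopPosition Record, built from the
+    latitude, longitude and depth columns in ``data``; depth values are stored
+    as positive numbers."""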
+    # TODO: Refactor
+    def _optional_value(colname):
+        # Return the cleaned-up value if the column is present in data, otherwise None.
+        return return_value_if_not_none(data[colname]) if colname in data else None
+
+    latitude_start = _optional_value("Latitude start")
+    latitude_stop = _optional_value("Latitude stop")
+    longitude_start = _optional_value("Longitude start")
+    longitude_stop = _optional_value("Longitude stop")
+    sampling_depth_start = _optional_value("Sampling depth start")
+    sampling_depth_stop = _optional_value("Sampling depth stop")
+    water_depth_start = _optional_value("Water depth start")
+    water_depth_stop = _optional_value("Water depth stop")
+    # insert start position
+    position_start = db.Record()
+    position_start.add_parent("StartPosition")
+    position_start.add_property(name="Latitude", value=latitude_start)
+    position_start.add_property(name="Longitude", value=longitude_start)
+    if not pd.isna(sampling_depth_start):
+        if sampling_depth_start < 0.0:
+            sampling_depth_start *= -1.0
+    # identifiable, so add even if it is None
+    position_start.add_property(name="Sampling depth", value=sampling_depth_start)
+    if not pd.isna(water_depth_start):
+        if water_depth_start < 0:
+            water_depth_start *= -1
+    # identifiable, so add even if it is None
+    position_start.add_property(name="Water depth", value=water_depth_start)
+
+    # A stop position may be specified by depth stop alone:
+    if not (pd.isna(sampling_depth_stop) and pd.isna(water_depth_stop)):
+        # Copy all empty info from start position
+        if pd.isna(latitude_stop) and pd.isna(longitude_stop):
+            latitude_stop = latitude_start
+            longitude_stop = longitude_start
+        if pd.isna(sampling_depth_stop):
+            sampling_depth_stop = sampling_depth_start
+        if pd.isna(water_depth_stop):
+            water_depth_stop = water_depth_start
+    # If there is an endposition: insert endposition
+    if not (pd.isna(latitude_stop) or pd.isna(longitude_stop)):
+
+        position_end = db.Record()
+        # position_end = db.Record("({}, {})".format(latitude_stop, longitude_stop))
+        position_end.add_parent("StopPosition")
+        position_end.add_property(name="Latitude", value=latitude_stop)
+        position_end.add_property(name="Longitude", value=longitude_stop)
+        if not pd.isna(sampling_depth_stop):
+            if sampling_depth_stop < 0:
+                sampling_depth_stop *= -1
+            # position_end.name = position_end.name + " at -{}m".format(sampling_depth_stop)
+        # identifiable, so add even if it is None
+        position_end.add_property(name="Sampling depth", value=sampling_depth_stop)
+        if not pd.isna(water_depth_stop):
+            if water_depth_stop < 0:
+                water_depth_stop *= -1
+        # identifiable, so add even if it is None
+        position_end.add_property(name="Water depth", value=water_depth_stop)
+        # position_end.insert(unique=False)
+        return [position_start, position_end]
+    else:
+        return [position_start]
+
+
+def get_gear(data):
+    """
+    Return the Gear Record that is specified by 'Gear' and 'Gear configuration' in data.
+
+    If no such Gear Record exists, or if it is not unique, a DataInconsistencyError is raised.
+    """
+
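+    # Example (hypothetical values): data with Gear = "CTD" and Gear configuration =
+    # "12 bottles" leads to the query FIND RECORD 'CTD' WITH 'Configuration'='12 bottles'.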
+    qtext = f"FIND RECORD '{data['Gear']}'"
+    if "Gear configuration" in data and pd.notnull(data["Gear configuration"]):
+        qtext += f" WITH 'Configuration'='{data['Gear configuration']}'"
+    try:
+        res = db.execute_query(qtext, unique=True)
+    except db.exceptions.EmptyUniqueQueryError:
+        raise DataInconsistencyError(f"The query\n{qtext}\nreturned no results.")
+    except db.exceptions.QueryNotUniqueError:
+        raise DataInconsistencyError(f"The query\n{qtext}\nreturned more than one result.")
+    return res
+
+
+def get_nagoya_case(data):
+    """Create and retrun a NagoyaCase Record."""
+    nagoya_case_number = return_value_if_not_none(data["Nagoya case number"])
+
+    nagoya_case = db.Record(nagoya_case_number)
+    nagoya_case.add_parent(name="NagoyaCase")
+    nagoya_case.add_property(name="Nagoya Case Number", value=nagoya_case_number)
+
+    return nagoya_case
+
+
+def get_person(text) -> db.Record:
+    """
+    Return the Person Record that is specified by the given text (e.g. taken from the
+    'Main User' or 'Sampling Person' column).
+
+    If the Person is not present in the database, a DataInconsistencyError is raised, since
+    creating new Person Records can only be done by a privileged user.
+    """
+    # Check in which format the person is identified:
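+    # e.g. (hypothetical values) "Doe, Jane" is split into last and first name, whereas
+    # "JDO" (no comma) is treated as an abbreviation.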
+    person_identifier = text.split(", ")
+    if len(person_identifier) == 1:
+        person = _get_person_by_abbreviation(person_identifier[0])
+    else:
+        person = _get_person_by_fullname(person_identifier[1], person_identifier[0])
+
+    return person
+
+
+def _get_person_by_fullname(first_name, last_name):
+    # search for the person in the database
+    res = db.execute_query(
+        "FIND RECORD Person WITH 'First name' = '{}' AND 'Last name' = '{}'".format(first_name, last_name))
+    # if person doesn't exist in database...
+    if len(res) == 0:
+        # There is not enough data in the template to create a new Person record, so we have to raise an exception.
+        error_msg = "There is no person Record with 'First name' = '{}' AND 'Last name' = '{}' in the database. ".format(
+            first_name, last_name)
+        raise DataInconsistencyError(error_msg)
+    else:
+        return res[0]
+
+
+def _get_person_by_abbreviation(abbreviation):
+    # search for the person in the database
+    res = db.execute_query("FIND RECORD Person WITH 'Abbreviation' = '{}'".format(abbreviation))
+    # if person doesn't exist in database...
+    if len(res) == 0:
+        # There is not enough data in the template to create a new Person record, so we have to raise an exception.
+        error_msg = "There is no Person Record with Abbreviation = '{}'".format(abbreviation)
+        raise DataInconsistencyError(error_msg)
+    else:
+        return res[0]
+
+
+def synchronize(records, additional_property_ents, htmluserlog_public):
+    crawler = Crawler(securityMode=SecurityMode.UPDATE)
+    identifiables_definition_file = os.path.expanduser("~/identifiables.yml")
+    ident = CaosDBIdentifiableAdapter()
+    ident.load_from_yaml_definition(identifiables_definition_file)
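+    # For RecordTypes that occur as additional properties, register a fallback identifiable:
+    # the name if the RecordType has no properties, otherwise its first property.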
+    for property_name, entity in additional_property_ents.items():
+        if entity.role != "RecordType":
+            continue
+        if len(entity.properties) == 0:
+            ident.register_identifiable(
+                name=entity.name,
+                definition=db.RecordType().add_parent(entity.name).add_property(name="name"))
+        else:
+            ident.register_identifiable(
+                name=entity.name,
+                definition=db.RecordType().add_parent(entity.name).add_property(
+                    name=entity.properties[0].name))
+    crawler.identifiableAdapter = ident
+
+    inserts, updates = crawler.synchronize(commit_changes=True, unique_names=False,
+                                           crawled_data=records,
+                                           no_insert_RTs=get_do_not_insert_type_names(),
+                                           no_update_RTs=None,
+                                           )
+    if "SHARED_DIR" in os.environ:
+        _notify_about_inserts_and_updates(len(inserts), len(updates), htmluserlog_public,
+                                          crawler.run_id)
+
+
+def update_sample_records(data, htmluserlog_public):
+    logger.info("Starting sample updates...")
+
+    # TODO Check data first and if there are Errors in the data: Provide the user with a download
+    # link to a template with Error descriptions.
+
+    # Get property ids:
+    person_property_id = db.get_entity_by_name("Main User").id
+    sampling_person_property_id = db.get_entity_by_name("Sampling Person").id
+    nagoya_case_property_id = get_id_of_datatype("NagoyaCase")
+    container_property_id = get_id_of_datatype("Container")
+    event_property_id = get_id_of_datatype("SourceEvent")
+    pdfreport_property_id = get_id_of_datatype("PDFReport")
+    parent_sample_property_id = db.get_entity_by_name("Parent Sample").id
+
+    additional_properties = data.keys().to_list()
+    additional_property_ids = {}  # name-> id
+    additional_property_ents = {}  # name-> Entity
+    for property_name in additional_properties:
+        if property_name in SPECIAL_TREATMENT_SAMPLE or _is_ignored_column_name(property_name):
+            continue
+        try:
+            try:
+                res = cached_get_entity_by(query=f"FIND PROPERTY WITH name='{property_name}'")
+            except db.EmptyUniqueQueryError:
+                res = cached_get_entity_by(query=f"FIND RECORDTYPE WITH name='{property_name}'")
+            additional_property_ids[property_name] = res.id
+            additional_property_ents[property_name] = res
+        except db.exceptions.EmptyUniqueQueryError:
+            logger.info(f"Couldn't find (unique) Property or RecordType: '{property_name}'."
+                        f"\nThe column '{property_name}' is not being used.")
+        except db.QueryNotUniqueError:
+            logger.info(f"Property or RecordType {property_name} was not unique. "
+                        "Skipping this column.")
+
+    # Create everything needed to update the samples
+    samples = []
+
+    for index, row in data.iterrows():
+
+        sample_id_exists = not pd.isnull(row["BIS ID"])
+        if not sample_id_exists:
+            raise DataInconsistencyError(f"Missing sample ID in row {index}")
+        try:
+            sample = db.execute_query(
+                "FIND RECORD Sample WITH id = {}".format(row["BIS ID"]), unique=True)
+        except db.exceptions.EmptyUniqueQueryError:
+            msg = "There is no Sample with ID = {} in the system.".format(row["BIS ID"])
+            raise DataInconsistencyError(msg)
+
+        sample = _update_property(entity=sample, property_id=person_property_id,
+                                  property_name="Main User",
+                                  value=get_person(row["Main User"]))
+        if "Parent BIS ID" in row and return_value_if_not_none(row["Parent BIS ID"]) is not None:
+            sample = _update_property(entity=sample, property_id=parent_sample_property_id,
+                                      value=row["Parent BIS ID"])
+        if ("Sampling Person" in row
+                and return_value_if_not_none(row["Sampling Person"]) is not None):
+            sample = _update_property(entity=sample, property_id=sampling_person_property_id,
+                                      property_name="Sampling Person",
+                                      value=get_person(row["Sampling Person"]))
+        if "PI" in row and return_value_if_not_none(row["PI"]) is not None:
+            sample = _update_property(entity=sample, property_id=db.get_entity_by_name("PI").id,
+                                      property_name="PI",
+                                      value=get_person(row["PI"]))
+        if "Nagoya case number" in row and return_value_if_not_none(row["Nagoya case number"]) is not None:
+            sample = _update_property(entity=sample, property_id=nagoya_case_property_id,
+                                      property_name="NagoyaCase",
+                                      value=get_nagoya_case(row))
+        if "Storage ID" in row and return_value_if_not_none(row["Storage ID"]) is not None:
+            sample = _update_property(entity=sample, property_id=container_property_id,
+                                      property_name="Container",
+                                      value=get_container(row))
+        if "Collection" in row and return_value_if_not_none(row["Collection"]) is not None:
+            sample = _update_property(entity=sample,
+                                      property_id=db.get_entity_by_name("Collection").id,
+                                      property_name="Collection",
+                                      datatype=db.LIST("Collection"),
+                                      value=[db.Record(name=el)
+                                             .add_parent(name="Collection")
+                                             for el in row["Collection"]])
+        if "Sphere" in row and return_value_if_not_none(row["Sphere"]) is not None:
+            sample = _update_property(entity=sample,
+                                      property_id=db.get_entity_by_name("Sphere").id,
+                                      property_name="Sphere",
+                                      datatype=db.LIST("Sphere"),
+                                      value=[db.Record(name=el)
+                                             .add_parent(name="Sphere")
+                                             for el in row["Sphere"]])
+
+        if "Date collected start" in row and return_value_if_not_none(row["Date collected start"]) is not None:
+            sample = _update_property(entity=sample, property_id=event_property_id, property_name='SourceEvent', value=get_event(
+                row, get_gear(row)))
+        if "PDFReport" in data.columns:
+            sample = _update_property(
+                entity=sample, property_id=pdfreport_property_id, property_name="PDFReport", value=row["PDFReport"])
+        if "Date sampled start" in data.columns:
+            sample = _append_times_to_entity(ent=sample, data=row, propname_prefix="Time sampled",
+                                             colname_time_prefix="Time sampled", colname_date_prefix="Date sampled")
+
+        # Add additional properties
+        for property_name in additional_property_ids.keys():
+            if return_value_if_not_none(row[property_name]) is None or (isinstance(row[property_name], list) and
+                                                                        len(row[property_name]) == 0):
+                continue
+            ent = additional_property_ents[property_name]
+            if ent.role == "RecordType":
+                value = db.Record().add_parent(ent.name)
+                if len(ent.properties) > 1:
+                    raise DataInconsistencyError(
+                        f"Trying to add a {ent.name} to a sample. Cannot identify property to set "
+                        f"because RecordType with ID={ent.id} has more than one Property.")
+                if len(ent.properties) == 0:
+                    value.name = return_value_if_not_none(row[property_name])
+                else:
+                    value.add_property(
+                        name=ent.properties[0].name, value=return_value_if_not_none(row[property_name]))
+            else:
+                value = return_value_if_not_none(row[property_name])
+            sample = _update_property(
+                entity=sample, property_id=additional_property_ids[property_name],
+                value=value, property_name=property_name)
+
+        samples.append(sample)
+
+    synchronize(samples, additional_property_ents, htmluserlog_public)
+
+
+def main():
+
+    parser = helper.get_argument_parser()
+    parser.add_argument(
+        "pickled_sample_data",
+        help="Dump of the cleaned and checked sample data for crawling."
+    )
+    parser.add_argument(
+        "old_filename",
+        help="Name of the file that was uploaded originally for logging purposes."
+    )
+    args = parser.parse_args()
+    if hasattr(args, "auth_token") and args.auth_token:
+        db.configure_connection(
+            auth_token=args.auth_token,
+            timeout=(30, 60*60*24*7)  # Rather short connection timeout, one week for read.
+        )
+        userlog_public, htmluserlog_public, debuglog_public = configure_server_side_logging()
+    else:
+        rootlogger = logging.getLogger()
+        rootlogger.setLevel(logging.INFO)
+        logger.setLevel(logging.DEBUG)
+        handler = logging.StreamHandler(stream=sys.stdout)
+        handler.setLevel(logging.DEBUG)
+        rootlogger.addHandler(handler)
+        userlog_public = "/tmp/upload_sample_userlog.log"
+        htmluserlog_public = "/tmp/upload_sample_userlog.html"
+        debuglog_public = "/tmp/upload_sample_debuglog.html"
+
+    try:
+        sample_data = pd.read_pickle(args.pickled_sample_data)
+    except (FileNotFoundError, UnpicklingError) as err:
+        email_body = f"""
+Dear curator,
+
+There were problems transferring the read-in CSV data from
+{args.old_filename} to the asynchronous crawl script:
+
+{str(err)}
+
+        """
+        _notify_about_error(
+            subject=f"Errors when loading {args.old_filename}",
+            text=email_body
+        )
+        return 2
+
+    try:
+        update_sample_records(sample_data, htmluserlog_public)
+    except db.TransactionError as te:
+        email_body = f"""
+Dear curator,
+
+There were problems synchronizing the sample entities from {args.old_filename} to the LinkAhead server:
+
+{te}
+        """
+        _notify_about_error(
+            subject=f"Errors when synchronoizing {args.old_filename}",
+            text=email_body
+        )
+        return 3
+
+    except DataInconsistencyError as die:
+
+        email_body = f"""
+Dear Curator,
+
+There were problems with the data in {args.old_filename}:
+
+{die}
+
+Please check for mistakes like typos in names or ids, wrong data
+types, or missing information.
+        """
+        _notify_about_error(
+            subject=f"Parsing errors in {args.old_filename}",
+            text=email_body
+        )
+        return 4
+
+    except ForbiddenTransaction as fte:
+        email_body = f"""
+Dear Curator,
+
+Crawling {args.old_filename} resulted in forbidden transactions:
+
+{fte}
+        """
+        _notify_about_error(
+            subject=f"Forbidden transactions in {args.old_filename}",
+            text=email_body
+        )
+        return 5
+
+    except ImpossibleMergeError as ime:
+        email_body = f"""
+Dear Curator,
+
+There was a conflict when merging sample or event information in {args.old_filename}:
+
+{ime}
+
+Please verify that there is no contradictory information belonging
+to a single entity.
+        """
+        _notify_about_error(
+            subject=f"Merge conflict in {args.old_filename}",
+            text=email_body
+        )
+        return 6
+
+
+if __name__ == "__main__":
+
+    sys.exit(main())
diff --git a/sample-management-custom/caosdb-server/scripting/bin/export_container_csv.py b/sample-management-custom/caosdb-server/scripting/bin/export_container_csv.py
new file mode 100755
index 0000000000000000000000000000000000000000..a23c47a849941e089d81f1b8afccf26e4ef02b98
--- /dev/null
+++ b/sample-management-custom/caosdb-server/scripting/bin/export_container_csv.py
@@ -0,0 +1,364 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2023 Indiscale GmbH <info@indiscale.com>
+# Copyright (C) 2023 Timm Fitschen <t.fitschen@indiscale.com>
+# Copyright (C) 2023 Florian Spreckelsen
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+import json
+import logging
+import os
+import sys
+
+import linkahead as db
+
+from caosadvancedtools.serverside import helper
+from caosadvancedtools.table_export import BaseTableExporter
+from caoscrawler.logging import configure_server_side_logging
+from linkahead.cached import cached_get_entity_by, cached_query as cquery
+from linkahead.exceptions import (EmptyUniqueQueryError,
+                                  QueryNotUniqueError)
+
+from bis_utils import (create_email_with_link_text,
+                       get_description_row, get_email_from_username,
+                       get_options_row, send_mail_with_defaults)
+
+# suppress warning of diff function
+apilogger = logging.getLogger("caosdb.apiutils")
+apilogger.setLevel(logging.ERROR)
+
+logger = logging.getLogger("caosadvancedtools")
+
+CACHE = {}
+
+ERROR_PREFIX = 'Something went wrong: '
+ERROR_SUFFIX = ' Please contact <a href="mailto:biosamples@geomar.de">biosamples@geomar.de</a> if you encounter this issue.'
+
+
+def cached_query(query, unique=False):
+    """Wrapper for cached queries that may be unique."""
+    if unique:
+        return cached_get_entity_by(query=query)
+    return cquery(query)
+
+
+def reverse_semicolon_separated_list(value):
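+    # Join list values with ";" (e.g., hypothetically, [12, "A3"] -> "12;A3"); scalar
+    # values are returned unchanged.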
+    if isinstance(value, list):
+        return ";".join([str(val) for val in value])
+    else:
+        return value
+
+
+def generate_label_text(entity):
+
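+    # Build the "<id> <label>" preview text, preferring "Custom label" over "BIS label";
+    # fall back to "<id> <name>" or the bare id when no label is available.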
+    if entity.get_property("Custom label") is not None and entity.get_property("Custom label").value:
+        label = entity.get_property("Custom label").value
+    else:
+        label = entity.get_property("BIS label").value if entity.get_property(
+            "BIS label") is not None else None
+
+    if not label:
+        if not entity.name:
+            return entity.id
+        return f"{entity.id} {entity.name}"
+    if f"{label}" == f"{entity.id}":
+        # prevent special case of BisLabel = BisID resulting in a preview
+        # that looks like "id, id".
+        return entity.id
+    return f"{entity.id} {label}"
+
+
+def extract_parent_container(record, key):
+
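+    # A parent container is any Container record that references this record.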
+    possible_parents = cached_query(f"FIND Container WHICH REFERENCES {record.id}")
+
+    values = []
+    for par in possible_parents:
+        if par.name:
+            values.append(par.name)
+        else:
+            values.append(par.id)
+    return reverse_semicolon_separated_list(values)
+
+
+def extract_bis_id(record, key):
+
+    return record.id
+
+
+def extract_custom_label(record, key):
+
+    if record.get_property("Custom label") is not None:
+
+        return record.get_property("Custom label").value
+
+    return None
+
+
+def extract_pi(record, key):
+
+    if record.get_property("PI") is not None and record.get_property("PI").value is not None:
+
+        pi_id = record.get_property("PI").value
+        pi_rec = cached_query(f"FIND RECORD Person WITH ID={pi_id}", unique=True)
+
+        if pi_rec.get_property("Abbreviation") is not None and pi_rec.get_property("Abbreviation").value is not None:
+            return pi_rec.get_property("Abbreviation").value
+        return pi_id
+
+    return None
+
+
+def extract_collection(record, key):
+
+    if record.get_property("Collection") is not None and record.get_property("Collection").value is not None:
+
+        collections = record.get_property("Collection").value
+        if not isinstance(collections, list):
+            collections = [collections]
+
+        values = []
+        for coll in collections:
+            cr = cached_query(f"FIND RECORD Collection WITH ID={coll}", unique=True)
+            if cr.name is not None and cr.name != "":
+                values.append(cr.name)
+            else:
+                values.append(cr.id)
+        return reverse_semicolon_separated_list(values)
+    return None
+
+
+def extract_container_type(record, key):
+
+    if record.get_property("ContainerType") is not None and record.get_property("ContainerType").value is not None:
+        ct_id = record.get_property("ContainerType").value
+        ctr = cached_query(f"FIND ContainerType WITH ID={ct_id}", unique=True)
+        if ctr.get_property("Abbreviation") is not None and ctr.get_property("Abbreviation").value is not None:
+            return ctr.get_property("Abbreviation").value
+        return ct_id
+    return None
+
+
+def extract_storage_chain(record, key):
+
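+    # Walk up the chain of referencing containers and render it as
+    # "outermost container → ... → this record".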
+    def find_referencing_containers(eid):
+
+        containing_containers = []
+        candidates = cached_query(f"FIND CONTAINER WHICH REFERENCES {eid}")
+        if len(candidates) > 1:
+            logger.debug(f"Entity {eid} is referenced by more than one container.")
+            return []
+        elif len(candidates) == 1:
+            containing_containers.extend(find_referencing_containers(candidates[0].id))
+        containing_containers.extend(candidates)
+        return containing_containers
+
+    containing_containers = find_referencing_containers(record.id)
+    containing_containers.append(record)
+
+    return " → ".join([str(generate_label_text(cont)) for cont in containing_containers])
+
+
+def default_extractor(record, key):
+
+    if record.get_property(key) is not None:
+        return record.get_property(key).value
+    else:
+        return None
+
+
+EXTRACTORS = {
+    "BIS ID": extract_bis_id,
+    "BIS label": default_extractor,
+    "Collection": extract_collection,
+    "Container Contents": default_extractor,
+    "Container size": default_extractor,
+    "Container type": extract_container_type,
+    "Custom label": extract_custom_label,
+    "PDFReport": default_extractor,
+    "PI": extract_pi,
+    "Parent container": extract_parent_container,
+    "Storage chain": extract_storage_chain,
+}
+
+# List of sample properties to be ignored because they are treated
+# otherwise. Similar, but not identical to SPECIAL TREATMENT.
+IGNORE_KEYS = [
+    "Child container",  # To be changed by updating the child, not the parent.
+    "ContainerType",  # handled separately above
+    "Responsible",  # Not to be changed via upload
+]
+
+
+def extract_value(r, e):
+    if e in EXTRACTORS:
+        v = EXTRACTORS[e](r, e)
+    else:
+        v = default_extractor(r, e)
+    if isinstance(v, str) and (',' in v or '\n' in v):
+        # Quote text fields with commas in them
+        v = f"\"{v}\""
+
+    return v if v is not None else ""
+
+
+class TableExporter(BaseTableExporter):
+    pass
+
+
+def retrieve_containers(data):
+
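+    # ``data`` may contain plain integer ids as well as ``range`` objects for id ranges;
+    # ids (or ranges) that cannot be resolved are collected in ``not_found``.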
+    container = []
+    not_found = []
+    for bis_id in data:
+        if isinstance(bis_id, int):
+            try:
+                container.append(
+                    cached_query(
+                        f"FIND RECORD Container WITH id = '{bis_id}'",
+                        unique=True))
+            except EmptyUniqueQueryError as e:
+                # we want to warn about these
+                not_found.append(bis_id)
+        else:
+            found_at_least_one_in_range = False
+            for next_bis_id in bis_id:
+                try:
+                    container.append(
+                        cached_query(
+                            f"FIND RECORD Container WITH id = '{next_bis_id}'",
+                            unique=True))
+                    found_at_least_one_in_range = True
+                except EmptyUniqueQueryError as e:
+                    pass
+            if not found_at_least_one_in_range:
+                not_found.append(f"{bis_id.start}-{bis_id.stop-1}")
+    return container, not_found
+
+
+def to_csv(containers):
+
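+    # Columns are the fixed EXTRACTORS keys plus any further container properties that
+    # are not in IGNORE_KEYS, so the export covers all data present on the records.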
+    export_dict = {}
+    for key in EXTRACTORS:
+        export_dict[key] = {}
+
+    lower_case_keys = [e.lower() for e in export_dict]
+    for c in containers:
+        # collect other properties
+        for p in c.get_properties():
+            if not p.name.lower() in lower_case_keys and not p.name.lower() in [ign.lower() for ign in IGNORE_KEYS]:
+                export_dict[p.name] = {}
+                lower_case_keys.append(p.name.lower())
+
+    for e in export_dict:
+        export_dict[e]["find_func"] = extract_value
+        export_dict[e]["optional"] = True
+
+    keys = [e for e in export_dict]
+    csv = []
+    for c in containers:
+        table_exporter = TableExporter(export_dict, record=c)
+        table_exporter.all_keys = keys
+        table_exporter.collect_information()
+        logger.debug('<code>' + str(table_exporter.info) + '</code>')
+
+        csv.append(table_exporter.prepare_csv_export(print_header=False))
+
+    header = ",".join(keys) + "\n"
+    header += ",".join(get_description_row(keys)) + '\n'
+    header += ",".join(get_options_row(keys)) + '\n'
+    return header + "\n".join(csv)
+
+
+def write_csv(file_name, csv):
+
+    display_path, internal_path = helper.get_shared_filename(file_name)
+    with open(internal_path, "w") as csv_file:
+        csv_file.write(csv)
+    return display_path
+
+
+def main():
+    parser = helper.get_argument_parser()
+    args = parser.parse_args()
+    # Check whether executed locally or as an SSS depending on
+    # auth_token argument.
+    if hasattr(args, "auth_token") and args.auth_token:
+        db.configure_connection(auth_token=args.auth_token)
+        debug_file = configure_server_side_logging()
+    else:
+        rootlogger = logging.getLogger()
+        rootlogger.setLevel(logging.INFO)
+        logger.setLevel(logging.DEBUG)
+        handler = logging.StreamHandler(stream=sys.stdout)
+        handler.setLevel(logging.DEBUG)
+        rootlogger.addHandler(handler)
+        debug_file = "/tmp/upload_sample_debug.log"
+
+    if hasattr(args, "filename") and args.filename:
+        # Read the input from the form (form.json)
+        with open(args.filename) as form_json:
+            form_data = json.load(form_json)
+
+            tmp = form_data["bis_ids"].split(",")
+            data = []
+            for d in tmp:
+                if "-" in d:
+                    bound = [int(b) for b in d.split("-")]
+                    data.append(range(min(bound), max(bound) + 1))
+                else:
+                    data.append(int(d.strip()))
+
+        containers, not_found = retrieve_containers(data)
+
+        if len(containers) == 0:
+            logger.error("No containers in the given range.")
+            return
+
+        for s in containers:
+            logger.debug("Found container " + str(s.id))
+        for s in not_found:
+            logger.warning("No containers found: " + str(s))
+
+        csv = to_csv(containers)
+
+        max_id = max([c.id for c in containers])
+        min_id = min([c.id for c in containers])
+        file_name = f"containers_export_(IDs_{min_id}_to_{max_id}).csv"
+        display_path = write_csv(file_name, csv)
+        logger.info("Your CSV-Export has been prepared successfully.\n" +
+                    f"Download the file <a href=/Shared/{display_path}>here</a>.")
+        try:
+            send_mail_with_defaults(
+                to=get_email_from_username(),
+                subject=f"BIS container export {file_name}",
+                body=create_email_with_link_text("container export", display_path)
+            )
+        except KeyError as ke:
+            logger.error(
+                "There is a problem with the server's email configuration:\n\n"
+                f"{ke}\n\nPlease contact your admin."
+            )
+    else:
+        msg = "{}export_sample_csv.py was called without the JSON file in args.{}".format(
+            ERROR_PREFIX, ERROR_SUFFIX)
+        logger.error(msg)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/sample-management-custom/caosdb-server/scripting/bin/export_sample_csv.py b/sample-management-custom/caosdb-server/scripting/bin/export_sample_csv.py
new file mode 100755
index 0000000000000000000000000000000000000000..4f47b48eb46557cc49e5e77d9dcef25a7ac15445
--- /dev/null
+++ b/sample-management-custom/caosdb-server/scripting/bin/export_sample_csv.py
@@ -0,0 +1,763 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2023 Indiscale GmbH <info@indiscale.com>
+# Copyright (C) 2023 Timm Fitschen <t.fitschen@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+
+import json
+import logging
+import os
+import sys
+import urllib
+from datetime import date, datetime
+from typing import List
+
+import linkahead as db
+import pandas as pd
+from caosadvancedtools.datainconsistency import DataInconsistencyError
+from caosadvancedtools.serverside import helper
+from caosadvancedtools.table_export import BaseTableExporter
+from caosadvancedtools.table_importer import CSVImporter
+from caoscrawler import Crawler, SecurityMode
+from caoscrawler.crawl import ForbiddenTransaction
+from caoscrawler.identifiable_adapters import CaosDBIdentifiableAdapter
+from caoscrawler.logging import configure_server_side_logging
+from linkahead.cached import cached_get_entity_by, cached_query as cquery
+from linkahead.common.datatype import get_id_of_datatype
+from linkahead.exceptions import (EmptyUniqueQueryError, QueryNotUniqueError,
+                                  TransactionError)
+from dateutil import parser as dateparser
+from dateutil.parser import isoparse
+
+from bis_utils import (create_email_with_link_text,
+                       get_description_row, get_email_from_username,
+                       get_options_row, send_mail_with_defaults,
+                       SPECIAL_TREATMENT_SAMPLE as SPECIAL_TREATMENT)
+from export_container_csv import (generate_label_text,
+                                  extract_storage_chain as container_storage_chain)
+from upload_sample_template import DATATYPE_DEFINITIONS
+
+# suppress warning of diff function
+apilogger = logging.getLogger("linkahead.apiutils")
+apilogger.setLevel(logging.ERROR)
+
+logger = logging.getLogger("caosadvancedtools")
+
+ERROR_PREFIX = 'Something went wrong: '
+ERROR_SUFFIX = ' Please contact <a href="mailto:biosamples@geomar.de">biosamples@geomar.de</a> if you encounter this issue.'
+
+
+# Declare mapping of different time/date column names to property names
+TIME_NAME_MAPPINGS = {
+    "date collected start": "time start",
+    "time collected start": "time start",
+    "date collected stop": "time stop",
+    "time collected stop": "time stop",
+    "date sampled start": "time sampled start",
+    "time sampled start": "time sampled start",
+    "date sampled stop": "time sampled stop",
+    "time sampled stop": "time sampled stop",
+}
+
+
+def cached_record(i):
+
+    return cached_get_entity_by(eid=i)
+
+
+def cached_query(query, unique=False):
+
+    if unique:
+        return cached_get_entity_by(query=query)
+
+    return cquery(query)
+
+
+def reverse_semicolon_separated_list(value):
+    if isinstance(value, list):
+        return ";".join([str(val) for val in value if val is not None])
+    else:
+        return value
+
+
+def collection_value(vals):
+    return reverse_semicolon_separated_list(vals)
+
+
+def person_value(vals):
+    return reverse_semicolon_separated_list(vals)
+
+
+def retrieve_values(ids, property_name):
+    properties = [cached_query(
+        f"SELECT '{property_name}' FROM ENTITY WITH id = '{i}'",
+        unique=True).get_property(property_name) for i in ids if i is not None]
+    return [p.value for p in properties if p is not None]
+
+
+def get_enum_value(values):
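+    # Prefer a referenced entity's "enumValue" property, then its name, then the bare id.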
+    values = values if isinstance(values, list) else [values]
+    referenced = [cached_record(i) for i in values if i is not None]
+    results = []
+    for e in referenced:
+        if e.get_property("enumValue") is not None and e.get_property("enumValue").value is not None and len(e.get_property("enumValue").value) > 0:
+            results.append(e.get_property("enumValue").value)
+        elif e.name is not None and len(e.name) > 0:
+            results.append(e.name)
+        else:
+            results.append(e.id)
+    return results
+
+
+def default_find(r, e):
+    p = r.get_property(e)
+    if p is not None and p.value is not None and p.is_reference():
+        return get_enum_value(p.value)
+    v = p.value if p is not None else None
+    return v
+
+
+def extract_value_as_list(record, key):
+    p = record.get_property(key)
+    values = p.value if p is not None else []
+    if not isinstance(values, list):
+        if values is None:
+            return []
+        values = [values]
+    return values
+
+
+def extract_storage_id(record, key):
+    return extract_value_as_list(record, "Container")
+
+
+def extract_pdf_id(record, key):
+    prop = record.get_property(key)
+    return prop.value if prop is not None else None
+
+
+def extract_storage_container_label(record, key):
+    ids = extract_value_as_list(record, "Container")
+    return retrieve_values(ids, 'BIS Label')
+
+
+def extract_nagoya_case_number(record, key):
+    ids = extract_value_as_list(record, "NagoyaCase")
+    return retrieve_values(ids, key)
+
+
+def extract_person(record, key):
+    ids = extract_value_as_list(record, key)
+    return retrieve_values(ids, 'Abbreviation')
+
+
+def extract_parent_sample(record, key):
+    p = record.get_property("Parent sample")
+    if p is not None:
+        return p.value
+
+
+def extract_reference_name(record, key):
+    ids = extract_value_as_list(record, key)
+    return [cached_query(f"SELECT 'name' FROM ENTITY WITH id = '{i}'", unique=True).name
+            for i in ids if i is not None]
+
+
+def retrieve_source_event(record):
+    ids = extract_value_as_list(record, 'SourceEvent')
+    if record.get_property("SourceEvent") is None:
+        # there are cases where this property is named "Event"
+        ids = extract_value_as_list(record, 'Event')
+    return [cached_record(i) for i in ids]
+
+
+def retrieve_gear(record):
+    ids = [e.get_property("Gear").value for e in retrieve_source_event(record)
+           if e.get_property("Gear") is not None]
+    return [cached_query(f"SELECT 'parent', 'Configuration' FROM ENTITY WITH id = '{i}'", unique=True) for i in ids]
+
+
+def extract_gear(record, key):
+    return [e.get_parents()[0].name for e in retrieve_gear(record)]
+
+
+def extract_gear_configuration(record, key):
+    return [e.get_property("Configuration").value for e in
+            retrieve_gear(record)
+            if e.get_property("Configuration") is not None]
+
+
+def extract_date_time(record, p):
+    if p.lower() == "time start" or p.lower() == "time stop":
+        # these are attached to the source event directly
+        return [e.get_property(p).value for e in retrieve_source_event(record) if
+                e.get_property(p) is not None and e.get_property(p).value is not None]
+    else:
+        return extract_value_as_list(record, p)
+
+
+def extract_date(record, key):
+    if key.lower() in TIME_NAME_MAPPINGS.keys():
+        time_key = TIME_NAME_MAPPINGS[key.lower()]
+    else:
+        time_key = key
+    date_times = extract_date_time(record, time_key)
+    return [isoparse(t).date().isoformat() for t in date_times]
+
+
+def extract_time(record, key):
+    if key.lower() in TIME_NAME_MAPPINGS.keys():
+        time_key = TIME_NAME_MAPPINGS[key.lower()]
+    else:
+        time_key = key
+    date_times = extract_date_time(record, time_key)
+    return [isoparse(t).time().isoformat() for t in date_times]
+
+
+def extract_time_zone(record, key):
+    date_times = extract_date_time(record, "Time start")
+    return [isoparse(t).tzname() for t in date_times]
+
+
+def extract_station_number(record, key):
+    source_ev = retrieve_source_event(record)
+    return [e.get_property(key).value for e in source_ev if
+            e.get_property(key) is not None]
+
+
+def extract_station_id(record, key):
+    source_ev = retrieve_source_event(record)
+    return [e.get_property(key).value for e in source_ev if
+            e.get_property(key) is not None]
+
+
+def retrieve_positions(source_ev):
+    pos_ids = extract_value_as_list(source_ev, "Position")
+    return [cached_record(i) for i in pos_ids]
+
+
+def has_parent(r, par):
+    pars = [p.name for p in r.get_parents()]
+    return par in pars
+
+
+def extract_position(record, position, component):
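+    # Older events use a generic "Position" parent: in that case the first position is
+    # treated as the start and the last one as the stop position. Newer events use
+    # explicit StartPosition/StopPosition parents.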
+    source_evs = retrieve_source_event(record)
+    result = []
+    for ev in source_evs:
+        _pos = [pos for pos in retrieve_positions(ev)]
+
+        old_pos = len([pos for pos in _pos if has_parent(pos, "Position")]) > 0
+        if old_pos:
+            if position == "StartPosition":
+                result.append(_pos[0])
+            elif len(_pos) > 1:
+                result.append(_pos[-1])
+        else:
+            result.extend([pos for pos in retrieve_positions(ev) if
+                           has_parent(pos, position)])
+    return [pos.get_property(component).value for pos in result if pos.get_property(component) is not None]
+
+
+def extract_lat_start(record, key):
+    return extract_position(record, "StartPosition", "Latitude")
+
+
+def extract_lat_stop(record, key):
+    return extract_position(record, "StopPosition", "Latitude")
+
+
+def extract_lng_start(record, key):
+    return extract_position(record, "StartPosition", "Longitude")
+
+
+def extract_lng_stop(record, key):
+    return extract_position(record, "StopPosition", "Longitude")
+
+
+def extract_sampling_depth_start(record, key):
+    return extract_position(record, "StartPosition", "Sampling depth")
+
+
+def extract_sampling_depth_stop(record, key):
+    return extract_position(record, "StopPosition", "Sampling depth")
+
+
+def extract_water_depth_start(record, key):
+    return extract_position(record, "StartPosition", "Water depth")
+
+
+def extract_water_depth_stop(record, key):
+    return extract_position(record, "StopPosition", "Water depth")
+
+
+def extract_source_event_name(record, key):
+    return [e.name for e in retrieve_source_event(record)]
+
+
+def extract_hol(record, key):
+    source_ev = retrieve_source_event(record)
+    return [e.get_property(key).value for e in source_ev if
+            e.get_property(key) is not None]
+
+
+def extract_bis_url(record, key):
+    # base_uri = db.get_config().get("Connection", "url")
+    base_uri = "https://biosamples.geomar.de/"
+    return urllib.parse.urljoin(base_uri, f"Entity/{record.id}")
+
+
+def extract_igsn(record, key):
+    source_evs = retrieve_source_event(record)
+    if len(source_evs) > 1:
+        logger.error(
+            f"Sample {record.id} references more than one SourceEvent so no unique IGSN can be exported.")
+        return None
+    elif len(source_evs) == 0:
+        return None
+    ev = source_evs[0]
+    return ev.get_property(key).value if ev.get_property(key) is not None else None
+
+
+def extract_doi(record, key):
+    source_evs = retrieve_source_event(record)
+    if len(source_evs) > 1:
+        logger.error(
+            f"Sample {record.id} references more than one SourceEvent so no unique DOI can be exported.")
+        return None
+    elif len(source_evs) == 0:
+        return None
+    ev = source_evs[0]
+    return ev.get_property("DOI").value if ev.get_property("DOI") is not None else None
+
+
+def extract_storage_chain(record, key):
+
+    if record.get_property("Container") is not None and record.get_property("Container").value:
+
+        cont_id = record.get_property("Container").value
+        if isinstance(cont_id, list):
+            if len(cont_id) > 1:
+                logger.debug(f"Sample {record.id} has multiple containers.")
+                return None
+            if len(cont_id) == 0:
+                return None
+            cont_id = cont_id[0]
+        container = cached_get_entity_by(eid=cont_id)
+        container_chain = container_storage_chain(container, key)
+        return f"{container_chain} → {generate_label_text(record)}"
+
+    return None
+
+
+def extract_event_url(record, key):
+
+    events = retrieve_source_event(record)
+    if not events:
+        return None
+    if len(events) == 1:
+        return urllib.parse.urljoin("https://biosamples.geomar.de", f"Entity/{events[0].id}")
+    logger.debug(f"Sample {record.id} has multiple events.")
+    return None
+
+
+# must be same keys as SPECIAL_TREATMENT
+EXTRACTORS = {
+    "BIS ID": lambda record, key: record.id,
+    "Parent BIS ID": extract_parent_sample,
+    "AphiaID": default_find,
+    "Collection": extract_reference_name,
+    "Date collected start": extract_date,
+    "Date collected stop": extract_date,
+    "Date sampled start": extract_date,
+    "Date sampled stop": extract_date,
+    "Main User": extract_person,
+    "Sampling Person": extract_person,
+    "PI": extract_person,
+    "Person": extract_person,
+    "Gear": extract_gear,
+    "Gear configuration": extract_gear_configuration,
+    "Latitude start": extract_lat_start,
+    "Longitude start": extract_lng_start,
+    "Storage ID": extract_storage_id,
+    "Nagoya case number": extract_nagoya_case_number,
+    "PDFReport": extract_pdf_id,
+    "Subevent": extract_source_event_name,
+    "Time collected start": extract_time,
+    "Time collected stop": extract_time,
+    "Time sampled start": extract_time,
+    "Time sampled stop": extract_time,
+    "Timezone": extract_time_zone,
+    "Station ID": extract_station_id,
+    "Station number": extract_station_number,
+    "Sampling depth start": extract_sampling_depth_start,
+    "Sampling depth stop": extract_sampling_depth_stop,
+    "Water depth start": extract_water_depth_start,
+    "Water depth stop": extract_water_depth_stop,
+    "Latitude stop": extract_lat_stop,
+    "Longitude stop": extract_lng_stop,
+    "Storage chain": extract_storage_chain,
+    "Storage Container Label": extract_storage_container_label,
+    "Hol": extract_hol,
+    "Sampling method": default_find,
+    # "Publications": TODO never used
+    # "NCBI BioProject": TODO never used
+    # "NCBI BioSample": TODO never used
+    # "NCBI Accession": TODO never used
+    "BIS URL": extract_bis_url,
+    "IGSN": extract_igsn,
+    "IGSN URL": extract_doi,
+    "Sphere": default_find,
+    "URL SourceEvent": extract_event_url,
+}
+
+REVERSE_COLUMN_CONVERTER = {
+    "Collection": collection_value,
+    "PI": person_value,
+    "Person": person_value,
+}
+
+# List of sample properties to be ignored because they are treated
+# otherwise. Similar, but not identical to SPECIAL TREATMENT.
+IGNORE_KEYS = [
+    "NagoyaCase",
+    "Parent Sample",
+    "Container",
+    "SourceEvent",
+    "Event",
+    "Date",
+    "Sampling depth",
+    "Water depth"
+]
+
+# Additional list of keys to be ignored when extracting parent sample information
+IGNORE_KEYS_PARENT = IGNORE_KEYS + [
+    "BIS ID",
+]
+
+# List of columns to be exported although they are not known to or ignored by
+# the import.
+ADDITIONAL_EXPORTS = [
+    "BIS URL",
+    "Date sampled start",
+    "Date sampled stop",
+    "IGSN URL",
+    "IGSN",
+    "Parent BIS ID",
+    "Storage chain",
+    "Time sampled start",
+    "Time sampled stop",
+    "URL SourceEvent"
+]
+
+
+def extract_value(r, e):
+    e = _extract_key_from_parent_key(e)
+    if e in EXTRACTORS:
+        v = EXTRACTORS[e](r, e)
+    else:
+        v = default_find(r, e)
+    if isinstance(v, str) and (',' in v or '\n' in v):
+        # Quote text fields with commas in them
+        v = f"\"{v}\""
+    return v if v is not None else ""
+
+
+class TableExporter(BaseTableExporter):
+    pass
+
+
+def _extract_key_from_parent_key(parent_key, parent_suffix="_parent"):
+
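+    # Strip the repeated suffix, e.g. "Collection_parent_parent" -> "Collection".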
+    while parent_key.endswith(parent_suffix):
+        parent_key = parent_key[:-len(parent_suffix)]
+
+    return parent_key
+
+
+def gather_parent_information(parent_id, export_dict, level=1, parent_suffix="_parent"):
+
+    # Recursively collect the parent sample's information; for each nesting level the
+    # exported keys get an additional "_parent" suffix.
+    parent_dict = {}
+    for key, val in export_dict.items():
+        if key.lower() not in [ign.lower() for ign in IGNORE_KEYS_PARENT]:
+            parent_dict[key+parent_suffix*level] = val
+    parent_rec = cached_get_entity_by(eid=parent_id)
+    table_exporter = TableExporter(parent_dict, record=parent_rec)
+    table_exporter.keys = [e for e in parent_dict]
+    table_exporter.collect_information()
+    for e, d in table_exporter.export_dict.items():
+        if _extract_key_from_parent_key(e, parent_suffix) in REVERSE_COLUMN_CONVERTER:
+            table_exporter.info[e] = REVERSE_COLUMN_CONVERTER[_extract_key_from_parent_key(
+                e, parent_suffix)](table_exporter.info[e])
+        else:
+            table_exporter.info[e] = reverse_semicolon_separated_list(table_exporter.info[e])
+
+    parent_info = table_exporter.prepare_csv_export(print_header=False)
+    parent_keys = list(parent_dict.keys())
+    if parent_rec.get_property("Parent sample") is not None and parent_rec.get_property("Parent sample").value is not None:
+        if isinstance(parent_rec.get_property("Parent sample").value, list):
+            logger.warning(
+                f"Sample {parent_rec.id} has multiple parent samples. Export not supported, skipping.")
+        else:
+            next_parent_info, next_parent_keys = gather_parent_information(
+                parent_rec.get_property("Parent sample").value, export_dict, level=level+1)
+            parent_info += next_parent_info
+            parent_keys += next_parent_keys
+
+    if len(parent_info) > 0:
+        return ',' + parent_info, parent_keys
+
+    return '', []
+
+
+def to_csv(samples):
+
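+    # Columns are the known import columns (DATATYPE_DEFINITIONS), the export-only
+    # ADDITIONAL_EXPORTS, plus any further sample properties that are not in IGNORE_KEYS.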
+    export_dict = {}
+    for c in DATATYPE_DEFINITIONS:
+        export_dict[c] = {}
+    for c in ADDITIONAL_EXPORTS:
+        export_dict[c] = {}
+
+    lower_case_keys = [e.lower() for e in export_dict]
+
+    for s in samples:
+        # collect other properties
+        for p in s.get_properties():
+            if (not p.name.lower() in lower_case_keys
+                    and not p.name.lower() in [ign.lower() for ign in IGNORE_KEYS]):
+                export_dict[p.name] = {}
+                lower_case_keys.append(p.name.lower())
+
+    for c in export_dict:
+        export_dict[c]["find_func"] = extract_value
+        export_dict[c]["optional"] = True
+
+    keys = [e for e in export_dict]
+    csv = []
+    parent_csv_keys = []
+    for s in samples:
+        table_exporter = TableExporter(export_dict, record=s)
+        table_exporter.all_keys = keys
+        table_exporter.collect_information()
+        logger.debug('<code>' + str(table_exporter.info) + '</code>')
+
+        # Post-processing to values (e.g. list to string)
+        for e, d in table_exporter.export_dict.items():
+            if e in table_exporter.info:
+
+                if e in REVERSE_COLUMN_CONVERTER:
+                    table_exporter.info[e] = REVERSE_COLUMN_CONVERTER[e](table_exporter.info[e])
+                else:
+                    table_exporter.info[e] = reverse_semicolon_separated_list(
+                        table_exporter.info[e])
+
+        sample_info = table_exporter.prepare_csv_export(print_header=False)
+        if s.get_property("Parent sample") is not None and s.get_property("Parent sample").value is not None:
+            if isinstance(s.get_property("Parent sample").value, list):
+                logger.warning(
+                    f"Sample {s.id} has multiple parent samples. Export not supported, skipping.")
+            else:
+                parent_info, parent_keys = gather_parent_information(
+                    s.get_property("Parent sample").value, export_dict, level=1)
+                # Save the longest parent keys
+                if len(parent_csv_keys) < len(parent_keys):
+                    parent_csv_keys = parent_keys
+                sample_info += parent_info
+        csv.append(sample_info)
+
+    # Extend header rows in case of parents
+    csv_keys = keys + parent_csv_keys
+    csv_descr = get_description_row([_extract_key_from_parent_key(k) for k in csv_keys])
+    csv_options = get_options_row([_extract_key_from_parent_key(k) for k in csv_keys])
+
+    return ",".join(csv_keys) + "\n" + ",".join(csv_descr) + '\n' + ",".join(csv_options) + '\n' + "\n".join(csv)
+
+
+def retrieve_samples(data):
+    container = []
+    not_found = []
+    for bis_id in data:
+        if isinstance(bis_id, int):
+            try:
+                container.append(
+                    cached_get_entity_by(query=f"FIND RECORD SAMPLE WITH id='{bis_id}'"))
+            except EmptyUniqueQueryError as e:
+                # we want to warn about these
+                not_found.append(bis_id)
+        else:
+            found_at_least_one_in_range = False
+            for next_bis_id in bis_id:
+                try:
+                    container.append(
+                        cached_get_entity_by(query=f"FIND RECORD Sample WITH id='{next_bis_id}'"))
+                    found_at_least_one_in_range = True
+                except EmptyUniqueQueryError as e:
+                    pass
+            if not found_at_least_one_in_range:
+                not_found.append(f"{bis_id.start}-{bis_id.stop-1}")
+    return container, not_found
+
+
+def sanity_check():
+    for key in SPECIAL_TREATMENT:
+        if key not in EXTRACTORS:
+            raise Exception(f"No extraction method defined for key '{key}'.")
+
+
+def write_csv(file_name, csv, no_empty_columns):
+    """Write the csv data in ``csv`` to with given ``file_name`` to the shared
+    resource. Drop empy columns before writing if ``no_empty_columns`` is
+    ``True``.
+
+    """
+    display_path, internal_path = helper.get_shared_filename(file_name)
+    with open(internal_path, "w") as csv_file:
+        csv_file.write(csv)
+    if no_empty_columns:
+        # Pandas seems to have problems with commas and quotation marks in the
+        # description rows when loading the csv without ignoring comment
+        # lines. So we need to restore the descriptions manually further down
+        # the line.
+        tmp = pd.read_csv(internal_path, comment='#', dtype=str)
+        # drop all empty columns
+        tmp.dropna(axis=1, inplace=True, how="all")
+        # generate new description row and insert as the first "data row"
+        new_descriptions = get_description_row(
+            [_extract_key_from_parent_key(cname) for cname in tmp.columns])
+        description_row_dict = {cname: descr for (
+            cname, descr) in zip(tmp.columns, new_descriptions)}
+        tmp.loc[-1] = description_row_dict
+        tmp.index += 1
+        tmp.sort_index(inplace=True)
+        tmp.to_csv(internal_path, index=False)
+
+    return display_path
+
+
+def main():
+    sanity_check()
+    parser = helper.get_argument_parser()
+    args = parser.parse_args()
+    # Check whether executed locally or as an SSS depending on
+    # auth_token argument.
+    if hasattr(args, "auth_token") and args.auth_token:
+        db.configure_connection(auth_token=args.auth_token)
+        debug_file = configure_server_side_logging()
+    else:
+        rootlogger = logging.getLogger()
+        rootlogger.setLevel(logging.INFO)
+        logger.setLevel(logging.DEBUG)
+        handler = logging.StreamHandler(stream=sys.stdout)
+        handler.setLevel(logging.DEBUG)
+        rootlogger.addHandler(handler)
+        debug_file = "/tmp/upload_sample_debug.log"
+
+    if hasattr(args, "filename") and args.filename:
+        # Read the input from the form (form.json)
+        with open(args.filename) as form_json:
+            form_data = json.load(form_json)
+
+            no_empty_columns = False
+            if "noEmpyColumns" in form_data and form_data["noEmpyColumns"] == "on":
+                logger.info("Removing empty columns from export")
+                no_empty_columns = True
+
+            if "from_date" in form_data:
+                # Samples whose SourceEvent (with an IGSN) was inserted since the given date ...
+                data = [el.id for el in db.execute_query(
+                    "SELECT id FROM sample WHICH REFERENCES A SourceEvent "
+                    "WHICH HAS AN IGSN AND "
+                    f"(WHICH WAS INSERTED SINCE {form_data['from_date']})")
+                ]
+                # ... plus those whose SourceEvent was updated since the given date
+                data += [el.id for el in db.execute_query(
+                    "SELECT id FROM sample WHICH REFERENCES A SourceEvent "
+                    "WHICH HAS AN IGSN AND "
+                    f"(WHICH WAS UPDATED SINCE {form_data['from_date']})")
+                ]
+            elif "query_string" in form_data and form_data["query_string"]:
+                query_string = form_data["query_string"]
+                if not query_string.lower().startswith("find ") and not query_string.lower().startswith("select "):
+                    logger.error(
+                        f"The query '{query_string}' dosn't seem to be a valid select or find query.")
+                    return
+                if query_string.lower().startswith("find "):
+                    # transform to select query for performance
+                    query_string = "SELECT id FROM" + query_string[4:]
+                try:
+                    data = [el.id for el in db.execute_query(query_string)]
+                except db.TransactionError as te:
+                    logger.error(
+                        f"There is a problem with the given query '{query_string}':\n"
+                        f"```\n{str(te)}\n```"
+                    )
+                    return
+            else:
+                if not form_data["bis_ids"]:
+                    logger.error(
+                        "Please specify the samples to be exported either by query or by id(s).")
+                    return
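+                # Parse the comma-separated ID list; dash-separated entries become
+                # ID ranges, e.g. "12, 15-17" yields [12, range(15, 18)] (illustrative).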
+                tmp = form_data["bis_ids"].split(",")
+                data = []
+                for d in tmp:
+                    if "-" in d:
+                        bound = [int(b) for b in d.split("-")]
+                        data.append(range(min(bound), max(bound) + 1))
+                    else:
+                        data.append(int(d.strip()))
+
+        samples, not_found = retrieve_samples(data)
+
+        if len(samples) == 0:
+            logger.error("No samples in the given range.")
+            return
+
+        for s in samples:
+            logger.debug("Found sample " + str(s.id))
+        for s in not_found:
+            logger.warning("No samples found: " + str(s))
+
+        csv = to_csv(samples)
+
+        max_id = max([s.id for s in samples])
+        min_id = min([s.id for s in samples])
+        file_name = f"samples_export_(IDs_{min_id}_to_{max_id}).csv"
+        display_path = write_csv(file_name, csv, no_empty_columns)
+        logger.info("Your CSV-Export has been prepared successfully.\n" +
+                    f"Download the file <a href=/Shared/{display_path}>here</a>.")
+        try:
+            send_mail_with_defaults(
+                to=get_email_from_username(),
+                subject=f"BIS sample export {file_name}",
+                body=create_email_with_link_text("sample export", display_path)
+            )
+        except KeyError as ke:
+            logger.error(
+                "There is a problem with the server's email configuration:\n\n"
+                f"{ke}\n\nPlease contact your admin."
+            )
+    else:
+        msg = "{}export_sample_csv.py was called without the JSON file in args.{}".format(
+            ERROR_PREFIX, ERROR_SUFFIX)
+        logger.error(msg)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/sample-management-custom/caosdb-server/scripting/bin/register_new_containers.py b/sample-management-custom/caosdb-server/scripting/bin/register_new_containers.py
new file mode 100755
index 0000000000000000000000000000000000000000..f64e9341540ccc81506f1878c4f1c3048f21da43
--- /dev/null
+++ b/sample-management-custom/caosdb-server/scripting/bin/register_new_containers.py
@@ -0,0 +1,323 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2022 - 2023 GEOMAR
+# Copyright (C) 2022 Jakob Eckstein
+# Copyright (C) 2023 Indiscale GmbH <info@indiscale.com>
+# Copyright (C) 2023 Florian Spreckelsen <f.spreckelsen@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+
+import csv
+import datetime
+import json
+import logging
+import sys
+
+import caosdb as db
+from caosadvancedtools.serverside import helper
+from caosadvancedtools.serverside.logging import configure_server_side_logging
+
+from bis_utils import (create_email_with_link_text,
+                       get_description_row, get_email_from_username,
+                       get_options_row, send_mail_with_defaults)
+
+
+ERROR_PREFIX = 'Something went wrong: '
+ERROR_SUFFIX = ' Please contact <a href="mailto:biosamples@geomar.de">biosamples@geomar.de</a> if you encounter this issue.'
+BIS_URL = 'https://biosamples.geomar.de/Entity/'
+logger = logging.getLogger("caosadvancedtools")
+
+
+def get_parser():
+    par = helper.get_argument_parser()
+    # par.add_argument(
+    #     "ids", help="The id(s) of the sample record(s) to be exported.", type=int, metavar="id", nargs='*')
+    return par
+
+
+def get_current_year():
+    currentDateTime = datetime.datetime.now()
+    date = currentDateTime.date()
+    year = date.strftime("%Y")
+    return year
+
+
+def get_abbreviation(id):
+    try:
+        rd = db.execute_query("FIND {}".format(id))[0]
+        return rd.get_property("Abbreviation").value
+    except Exception:
+        entity_url = BIS_URL + str(id)
+        logger.error('{}There was a problem while querying for the abbreviation of <a href="{}">this BIS Entity</a>.{}'.format(
+            ERROR_PREFIX, entity_url, ERROR_SUFFIX))
+
+
+def create_new_label_counter(container_type_id, value=1):
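+    """Create a LabelCounter Record that keeps the running label number for the given ContainerType."""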
+    counter = db.Record()
+    counter.add_parent(name="LabelCounter")
+    counter.add_property(name="Counter", value=value)
+    counter.add_property(name="ContainerType", value=container_type_id)
+    counter.insert()
+
+
+def get_label_counter(container_type_id):
+    counter = db.execute_query(
+        "FIND LabelCounter WITH ContainerType = {}".format(container_type_id))
+    if len(counter) == 0:
+        create_new_label_counter(container_type_id)
+        return 1
+    else:
+        return counter[0].get_property("Counter").value
+
+
+def set_label_counter(container_type_id, value):
+    counter = db.execute_query(
+        "FIND LabelCounter WITH ContainerType = {}".format(container_type_id))
+    if len(counter) == 0:
+        create_new_label_counter(container_type_id, value)
+    else:
+        try:
+            counter[0].get_property("Counter").value = value
+            counter.update()
+        except Exception:
+            logger.error('{}There was an error when updating the internal label counter {}.{}'.format(
+                ERROR_PREFIX, counter[0], ERROR_SUFFIX))
+
+
+def get_bis_label_prefix(responsible_entity):
+    """
+    @return The prefix for the BIS Label, i.e. the BIS label without the container number.
+    """
+    try:
+        if responsible_entity.get_parent("Person") is not None:
+            research_unit = db.execute_query("FIND ResearchUnit WITH id = {}".format(
+                responsible_entity.get_property("ResearchUnit").value))[0]
+            return "{}_{}_{}".format(get_bis_label_prefix(research_unit), get_abbreviation(responsible_entity.id), get_current_year())
+        elif responsible_entity.get_parent("ResearchUnit") is not None:
+            research_division = db.execute_query("FIND ResearchDivision WITH id = {}".format(
+                responsible_entity.get_property("ResearchDivision").value))[0]
+            return "{}_{}".format(get_bis_label_prefix(research_division), get_abbreviation(responsible_entity.id))
+        else:
+            return "{}".format(get_abbreviation(responsible_entity.id))
+    except Exception:
+        logger.error('{}The system could not auto-generate the BIS Label for {}.{}'.format(
+            ERROR_PREFIX, responsible_entity, ERROR_SUFFIX))
+
+
+def create_container_entities(responsible_id, container_type_id, container_size, number_of_containers, parent_container):
+    """
+    Create the BIS Entities specified via the "Register Containers" Form in
+    ext_bis_stockmanagement.js.
+    @return A list of the newly registered entities."""
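+    # Each new container receives a BIS label of the form (illustrative):
+    # "<BIS label prefix>_<container type abbreviation>_<running counter>".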
+    # Create Batch for insert
+    batch = db.Container()
+    # Get BIS Label for responsible person
+    responsible_entity = db.execute_query("FIND {}".format(responsible_id))[0]
+    container_type_abbreviation = get_abbreviation(container_type_id)
+    bis_label_prefix = get_bis_label_prefix(responsible_entity)
+    bis_label_counter = get_label_counter(container_type_id)
+    for i in range(number_of_containers):
+        bis_label = "{}_{}_{}".format(
+            bis_label_prefix, container_type_abbreviation, bis_label_counter + i)
+        container = db.Record(bis_label)
+        container.add_parent(name="Container")
+        container.add_property(name="Responsible", value=responsible_id)
+        container.add_property(name="ContainerType", value=container_type_id)
+        container.add_property(name="Container size", value=container_size)
+        container.add_property(name="BIS label", value=bis_label)
+        container.add_property(name="Child container", value=[])
+        batch += [container]
+    try:
+        batch.insert()
+        for ent in batch:
+            ent.retrieve_acl()
+            ent.grant(role='Stock Manager', priority=False, permission="EDIT:ACL")
+            ent.update_acl()
+    except Exception:
+        logger.error("{}The registered containers could not be imported to the system.{}".format(
+            ERROR_PREFIX, ERROR_SUFFIX))
+        return None
+        # TODO: Reset the internal_container_label_counter of person to its previous value if nothing has been inserted
+
+    # Add to parent container
+    if parent_container is not None:
+        new_children = []
+        for entity in batch:
+            new_children += [entity.id]
+        # Check if property Child container exists:
+        if parent_container.get_property("Child container") is None:
+            child_cont_prop = db.Property(name="Child container").retrieve()
+            parent_container.add_property(id=child_cont_prop.id, value=new_children)
+        else:
+            # Check if the container already has children
+            if parent_container.get_property("Child container").value is None:
+                parent_container.get_property("Child container").value = new_children
+            else:
+                parent_container.get_property("Child container").value += new_children
+        parent_container.update()
+
+    set_label_counter(container_type_id=container_type_id,
+                      value=bis_label_counter + number_of_containers)
+    return batch
+
+
+def get_template_name(container_entities, file_format_extension):
+    first_id = container_entities[0].id
+    last_id = container_entities[-1].id
+    return "container_template_(IDs_{}_to_{}).{}".format(first_id, last_id, file_format_extension)
+
+
+def get_parent_container(id):
+    try:
+        parent = db.execute_query("FIND {}".format(id))[0]
+        return parent
+    except Exception:
+        logger.info("No parent container specified. The BIS ID or BIS label of the parent container has to be entered in the template before it is uploaded.")
+        return None
+
+
+def get_container_type(id):
+    try:
+        container_type = db.execute_query("FIND {}".format(id))[0]
+        return container_type
+    except Exception:
+        logger.error("{}The specified ContainerType could not be retrieved.{}".format(
+            ERROR_PREFIX, ERROR_SUFFIX))
+
+
+def create_csv_template(template_internal_path, container_entities, container_type, container_size, parent_container):
+    if parent_container is None:
+        parent_container_label = ""
+    else:
+        parent_container_label = parent_container.get_property("BIS label").value
+    with open(template_internal_path, 'w') as template_csv:
+        writer = csv.writer(template_csv)
+        # Write header
+        headers = [
+            "BIS ID",
+            "Container type",
+            "Container size",
+            "BIS label",
+            "PI",
+            "Collection",
+            "Parent container",
+            "Custom label",
+            "Container Contents",
+            "PDFReport"]
+        writer.writerow(headers)
+        # Write description with leading '#'
+        descriptions = get_description_row(headers)
+        if descriptions:
+            writer.writerow(descriptions)
+        options = get_options_row(headers)
+        if options:
+            writer.writerow(options)
+        # Write entity data to the lines
+        for entity in container_entities:
+            # One value per header column; PI, Collection, Custom label,
+            # Container Contents and PDFReport are left empty for the user to fill in.
+            writer.writerow([
+                entity.id,
+                container_type.name,
+                container_size,
+                entity.get_property("BIS label").value,
+                "",
+                "",
+                parent_container_label,
+                "",
+                "",
+                ""])
+
+
+def main():
+    parser = get_parser()
+    args = parser.parse_args()
+
+    global logger
+    # Check whether executed locally or as an SSS depending on
+    # auth_token argument.
+    if hasattr(args, "auth_token") and args.auth_token:
+        db.configure_connection(auth_token=args.auth_token)
+        debug_file = configure_server_side_logging()
+        # logger = logging.getLogger("caosadvancedtools")
+    else:
+        # logging config for local execution
+        # logger = logging.getLogger("caosadvancedtools")
+        logger.addHandler(logging.StreamHandler(sys.stdout))
+        logger.setLevel(logging.DEBUG)
+        debug_file = None
+
+    if hasattr(args, "filename") and args.filename:
+        # Read the input from the form (form.json)
+        with open(args.filename) as form_json:
+            form_data = json.load(form_json)
+
+            parent_container = get_parent_container(form_data["parent_id"])
+            container_type = get_container_type(form_data["container_type"])
+            container_size = form_data["container_size"]
+
+            # Create entities in BIS
+            container_entities = create_container_entities(
+                responsible_id=int(form_data["responsible_entity"]),
+                container_type_id=int(form_data["container_type"]),
+                container_size=container_size,
+                number_of_containers=int(form_data["number_of_containers"]),
+                parent_container=parent_container
+            )
+
+            # In case there has been an error upon container creation
+            if container_entities is None:
+                logger.error("The new containers could not be inserted.")
+                return 1
+
+            # Create template file on the server
+            file_format_extension = form_data["file_format"]
+            template_name = get_template_name(container_entities, file_format_extension)
+            template_display_path, template_internal_path = helper.get_shared_filename(
+                template_name)
+            logger.info("Internal path: {}, Display path: {}".format(
+                template_internal_path, template_display_path))
+            if file_format_extension == "csv":
+                create_csv_template(
+                    template_internal_path=template_internal_path,
+                    container_entities=container_entities,
+                    container_type=container_type,
+                    container_size=container_size,
+                    parent_container=parent_container)
+                logger.info("Your CSV-template has been prepared successfully.\n" +
+                            "Download the file <a href=/Shared/{}>here</a>.".format(template_display_path))
+                try:
+                    send_mail_with_defaults(
+                        to=get_email_from_username(),
+                        subject=f"BIS container registration {template_name}",
+                        body=create_email_with_link_text(
+                            "container registration template", template_display_path)
+                    )
+                except KeyError as ke:
+                    logger.error(
+                        "There is a problem with the server's email configuration:\n\n"
+                        f"{ke}\n\nPlease contact your admin."
+                    )
+            else:
+                logger.error("{}There was a problem with the specified file format: {}.{}".format(
+                    ERROR_PREFIX, file_format_extension, ERROR_SUFFIX))
+
+    else:
+        logger.error("{}register_new_contaners.py was called without the JSON file in args.{}".format(
+            ERROR_PREFIX, ERROR_SUFFIX))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/sample-management-custom/caosdb-server/scripting/bin/register_new_samples.py b/sample-management-custom/caosdb-server/scripting/bin/register_new_samples.py
new file mode 100755
index 0000000000000000000000000000000000000000..7724b577012ee3592a8de12a987b3f16054f6559
--- /dev/null
+++ b/sample-management-custom/caosdb-server/scripting/bin/register_new_samples.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python3
+
+import csv
+import json
+import logging
+import sys
+
+import linkahead as db
+from caosadvancedtools.serverside import helper
+from caosadvancedtools.serverside.logging import configure_server_side_logging
+
+from bis_utils import (create_email_with_link_text,
+                       get_description_row, get_email_from_username,
+                       get_options_row, send_mail_with_defaults)
+
+ERROR_PREFIX = 'Something went wrong: '
+ERROR_SUFFIX = ' Please contact <a href="mailto:biosamples@geomar.de">biosamples@geomar.de</a> if you encounter this issue.'
+BIS_URL = 'https://localhost:10443/Entity/'
+logger = logging.getLogger("caosadvancedtools")
+
+
+def get_parser():
+    par = helper.get_argument_parser()
+    return par
+
+
+def get_responsible_person_id(data):
+    return int(data["responsible_person"])
+
+
+def get_responsible_person_abbreviation(data):
+    person = db.execute_query("FIND {}".format(get_responsible_person_id(data)))[0]
+    return person.get_property("Abbreviation").value
+
+
+def get_number_of_samples(data):
+    return int(data["number_of_samples"])
+# def is_samples_from_cruise(data):
+#     return ("samples_from_cruise" in data and data["samples_from_cruise"] == "on")
+# def has_start_and_end_time(data):
+#     return ("start_and_end_time" in data and data["start_and_end_time"] == "on")
+# def get_number_of_locations(data):
+#     number_of_locations = int(data["number_of_locations"])
+#     if number_of_locations < 1:
+#         number_of_locations = 1 # At least one location is OBLIGATORY
+#     return number_of_locations
+
+
+def get_reference_properties(data):
+    return data["reference_properties"]
+
+
+def get_column_names(data):
+    # Make sure each header starts with BIS ID and Main User
+    starting_names = ["BIS ID", "Main User"]
+    # required_column_names is just a string of column names separated by
+    # commas.
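+    # Example (illustrative): required_column_names="Fixation,Gear" and
+    # column_names=["PI"] yield ["BIS ID", "Main User", "Fixation", "Gear", "PI"].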
+    other_names = data["required_column_names"].split(',') + data["column_names"]
+    starting_names.extend([name.strip()
+                          for name in other_names if name.strip() not in starting_names])
+    return starting_names
+
+
+def create_sample_entities(data):
+    responsible_person_id = get_responsible_person_id(data)
+    number_of_samples = get_number_of_samples(data)
+    add_state = len(db.execute_query("FIND 'Sample life-cycle'"))
+
+    batch = db.Container()
+    for index in range(number_of_samples):
+        sample = db.Record()
+        sample.add_parent(name="Sample")
+        sample.add_property(name="Main User", value=responsible_person_id)
+        sample.add_property("Container")
+        sample.add_property("NagoyaCase")
+        if add_state:
+            sample.state = db.State(name="Registered", model="Sample life-cycle")
+        batch += [sample]
+    batch.insert()
+    return batch
+
+
+def get_template_name(sample_entities):
+    first_id = sample_entities[0].id
+    last_id = sample_entities[-1].id
+    return "sample_template_(IDs_{}_to_{}).csv".format(first_id, last_id)
+
+
+def configure(args):
+    global logger
+    # Check whether executed locally or as an SSS depending on
+    # auth_token argument.
+    if hasattr(args, "auth_token") and args.auth_token:
+        db.configure_connection(auth_token=args.auth_token)
+        debug_file = configure_server_side_logging()
+        logger = logging.getLogger("caosadvancedtools")
+    else:
+        # logging config for local execution
+        logger = logging.getLogger("caosadvancedtools")
+        logger.addHandler(logging.StreamHandler(sys.stdout))
+        logger.setLevel(logging.DEBUG)
+        debug_file = None
+
+
+def read_input(args, log=False):
+    if hasattr(args, "filename") and args.filename:
+        # Read the input from the form (form.json)
+        with open(args.filename, mode='r') as form_json:
+            form_data = json.load(form_json)
+        if log:
+            with open(args.filename, mode='r') as form_json:
+                logger.info("Loaded JOSN file: \n{}".format(form_json.read()))
+        return form_data
+    else:
+        logger.error("{}register_new_samples.py was called without the JSON file in args.{}".format(
+            ERROR_PREFIX, ERROR_SUFFIX))
+        return None  # TODO: EXIT
+
+
+def create_csv_template(template_name, samples, form_input):
+    template_display_path, template_internal_path = helper.get_shared_filename(template_name)
+    # Read form_input
+    responsible_person_abbreviation = get_responsible_person_abbreviation(form_input)
+    number_of_samples = get_number_of_samples(form_input)
+    # samples_from_cruise = is_samples_from_cruise(form_input)
+    # start_and_end_time = has_start_and_end_time(form_input)
+    # number_of_locations = get_number_of_locations(form_input)
+    # reference_properties = get_reference_properties(form_input)
+    header = get_column_names(form_input)
+
+    with open(template_internal_path, 'w') as csv_template:
+        writer = csv.writer(csv_template)
+        writer.writerow(header)
+        writer.writerow(get_description_row(header))
+        writer.writerow(get_options_row(header))
+        # Write entity data to the lines
+        for entity in samples:
+            writer.writerow([
+                entity.id,
+                responsible_person_abbreviation
+            ])
+    return template_display_path
+
+
+def main():
+    parser = get_parser()
+    args = parser.parse_args()
+    configure(args)
+
+    form_input = read_input(args)
+    sample_entities = create_sample_entities(form_input)
+    template_name = get_template_name(sample_entities)
+    template_path = create_csv_template(template_name, sample_entities, form_input)
+    logger.info("Your CSV-template has been prepared successfully.\n" +
+                "Download the file <a href=/Shared/{}>here</a>.".format(template_path))
+    try:
+        send_mail_with_defaults(
+            to=get_email_from_username(),
+            subject=f"BIS sample registration {template_name}",
+            body=create_email_with_link_text("sample registration template", template_path)
+        )
+    except KeyError as ke:
+        logger.error(
+            "There is a problem with the server's email configuration:\n\n"
+            f"{ke}\n\nPlease contact your admin."
+        )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/sample-management-custom/caosdb-server/scripting/bin/register_sample_template.py b/sample-management-custom/caosdb-server/scripting/bin/register_sample_template.py
new file mode 100755
index 0000000000000000000000000000000000000000..34b5d37889aec654165447a6b922d3f4674903e0
--- /dev/null
+++ b/sample-management-custom/caosdb-server/scripting/bin/register_sample_template.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2024 Indiscale GmbH <info@indiscale.com>
+# Copyright (C) 2024 Florian Spreckelsen <f.spreckelsen@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+import json
+import logging
+import sys
+
+import linkahead as db
+from caosadvancedtools.serverside import helper
+from caosadvancedtools.serverside.logging import configure_server_side_logging
+from linkahead.utils.escape import escape_squoted_text
+
+logger = logging.getLogger("caosadvancedtools")
+
+
+def main():
+
+    # We only need the default arguments for SSS
+    parser = helper.get_argument_parser()
+    args = parser.parse_args()
+
+    # We assume that if there is an authtoken, we're run serverside
+    if hasattr(args, "auth_token") and args.auth_token:
+        db.configure_connection(auth_token=args.auth_token)
+        debug_file = configure_server_side_logging()
+    else:
+        # logging for local execution, print info on stdout
+        logger.addHandler(logging.StreamHandler(sys.stdout))
+        logger.setLevel(logging.DEBUG)
+        debug_file = None
+
+    if hasattr(args, "filename") and args.filename:
+        with open(args.filename) as form_json:
+            form_data = json.load(form_json)
+    else:
+        logger.error("Sample-template registration didn't receive any form information.")
+        return 0
+
+    template_rt = db.get_entity_by_name("RegisterSampleTemplate", role="RECORDTYPE")
+    template_name = form_data["template_name"]
+    candidates = db.execute_query(
+        f"FIND '{template_rt.name}' WITH name='{escape_squoted_text(template_name)}'"
+    )
+    if candidates:
+        logger.error(
+            f"A {template_rt.name} with name '{template_name}' exists in the database already. "
+            "Please choose a different name or rename the old entity."
+        )
+        return 0
+    rec = db.Record(name=template_name).add_parent(template_rt)
+    rec.add_property(name="column_names",
+                     value=form_data["column_names"], datatype=db.LIST(db.TEXT))
+    rec.add_property(name="Main User", value=form_data["main_user"])
+    try:
+        rec.insert()
+    except db.TransactionError as te:
+        logger.error(f"There was a problem inserting your {template_rt.name}:\n\n{te}")
+        return 0
+    logger.info(
+        f"Your {template_rt.name} was inserted successfully. You can use it for sample registration "
+        f"or visit it <a href=\"/Entity/{rec.id}\">here</a>."
+    )
+    return 0
+
+
+if __name__ == "__main__":
+
+    sys.exit(main())
diff --git a/sample-management-custom/caosdb-server/scripting/bin/update_containers.py b/sample-management-custom/caosdb-server/scripting/bin/update_containers.py
new file mode 100755
index 0000000000000000000000000000000000000000..6334551716ec80205b3765d81907f71e69a7026d
--- /dev/null
+++ b/sample-management-custom/caosdb-server/scripting/bin/update_containers.py
@@ -0,0 +1,305 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# This file is a part of the LinkAhead Project.
+#
+# Copyright (C) 2022 - 2024 GEOMAR
+# Copyright (C) 2022 Jakob Eckstein
+# Copyright (C) 2023 - 2024 Indiscale GmbH <info@indiscale.com>
+# Copyright (C) 2023 - 2024 Florian Spreckelsen <f.spreckelsen@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+
+import json
+import logging
+import os
+
+import linkahead as db
+import pandas as pd
+from caosadvancedtools.serverside import helper
+from caoscrawler import Crawler, SecurityMode
+from caoscrawler.crawl import _notify_about_inserts_and_updates
+from caoscrawler.logging import configure_server_side_logging
+from linkahead.cached import cached_query, cached_get_entity_by
+
+from bis_utils import (get_do_not_insert_type_names,
+                       replace_entity_urls_by_ids, whitespace_cleanup_in_df)
+
+# suppress warning of diff function
+apilogger = logging.getLogger("linkahead.apiutils")
+apilogger.setLevel(logging.ERROR)
+
+ERROR_PREFIX = 'Something went wrong: '
+ERROR_SUFFIX = ' Please contact <a href="mailto:biosamples@geomar.de">biosamples@geomar.de</a> if you encounter this issue.'
+logger = logging.getLogger("caosadvancedtools")
+
+
+def _value_in_row(key, row):
+
+    if key not in row:
+        return False
+    if pd.isnull(row[key]) or row[key] is None or f"{row[key]}" == "":
+        return False
+    return True
+
+
+def _get_parent_by_identifier(parent_identifier):
+    """Get parent specified either by BIS ID, name, or BIS label."""
+    try:
+        parent_identifier = int(parent_identifier)
+        query = f"FIND Container WITH ID={parent_identifier}"
+    except ValueError:
+        query = (f"FIND Container WITH name='{parent_identifier}' "
+                 f"OR WITH 'BIS label'='{parent_identifier}'")
+    return cached_query(query)
+
+
+def get_parser():
+    par = helper.get_argument_parser()
+    return par
+
+
+def main():
+    userlog_public, htmluserlog_public, debuglog_public = configure_server_side_logging()
+    logger = logging.getLogger("caosadvancedtools")
+    parser = get_parser()
+    args = parser.parse_args()
+    # Check whether executed locally or as an SSS depending on
+    # auth_token argument.
+    if hasattr(args, "auth_token") and args.auth_token:
+        db.configure_connection(auth_token=args.auth_token)
+
+    if hasattr(args, "filename") and args.filename:
+        upload_dir = os.path.dirname((args.filename))
+        # Read the input from the form (form.json)
+        with open(args.filename) as form_json:
+            form_data = json.load(form_json)
+        # Read content of the uploaded file
+        path = os.path.join(upload_dir, form_data["container_metadata_file"])
+        data = whitespace_cleanup_in_df(pd.read_csv(path, comment='#'))
+    else:
+        raise ValueError("This script was called without the mandatory form data json file.")
+    data = replace_entity_urls_by_ids(data)
+
+    # Get referenced container entities
+    child_containers = db.Container()
+    parent_containers = db.Container()
+    for index, row in data.iterrows():
+        if not _value_in_row("BIS ID", row):
+            logger.error(f"BIS ID is missing in row {index+1}. Nothing was updated.")
+            return 1
+        try:
+            child = db.Record(id=int(row["BIS ID"]))
+        except ValueError:
+            logger.error(
+                f"Invalid BIS ID {row['BIS ID']} in row {index + 1}. Nothing was updated.")
+            return 1
+        child.add_parent("Container")
+        child_containers.append(child)
+
+        if _value_in_row("Parent container", row):
+            parent_identifier = row["Parent container"]
+            parent = _get_parent_by_identifier(parent_identifier)
+            if len(parent) == 0:
+                logger.error(
+                    f"Couldn't find parent with identifier '{parent_identifier}' in row {index+1}.")
+                return 1
+            elif len(parent) > 1:
+                logger.error(f"Parent with identifier '{parent_identifier}' in row {index+1} was not unique. "
+                             "Please specify with BIS ID instead.")
+                return 1
+            parent = parent[0]
+            try:
+                parent_containers.get_entity_by_id(parent.id)
+            except KeyError:
+                parent_containers.append(parent)
+
+    if not child_containers:
+        # Nothing to update
+        logger.error("There are no containers to be updated")
+        return 1
+
+    # Get IDs of properties
+    child_container_prop = cached_get_entity_by(query="FIND Property WITH name = 'Child container'")
+    custom_label_prop = cached_get_entity_by(query="FIND Property WITH name = 'Custom label'")
+    pdf_rt = cached_get_entity_by(query="FIND RECORDTYPE WITH name=PDFReport")
+
+    # Update (/create) container entities
+    for index, row in data.iterrows():
+        # Add child to parent
+        parent = None
+        if _value_in_row("Parent container", row):
+            parent_identifier = row["Parent container"]
+            # This has already been checked above for uniqueness
+            candidate = _get_parent_by_identifier(parent_identifier)[0]
+            # A bit redundant, but we need the exact Python object here that is in the parent_containers list.
+            parent = parent_containers.get_entity_by_id(candidate.id)
+
+            if parent.get_property(child_container_prop.id) is None:
+                parent.add_property(id=child_container_prop.id,
+                                    name=child_container_prop.name, value=[int(row["BIS ID"])])
+            else:
+                if parent.get_property(child_container_prop.id).value is None:
+                    parent.get_property(child_container_prop.id).value = [int(row["BIS ID"])]
+                else:
+                    if int(row["BIS ID"]) not in parent.get_property(child_container_prop.id).value:
+                        parent.get_property(child_container_prop.id).value.append(
+                            int(row["BIS ID"]))
+
+            # remove the current child from all other parents (don't do anything if the parent didn't change)
+            old_parents = cached_query(f"FIND Container WHICH REFERENCES {int(row['BIS ID'])}")
+            for old_parent in old_parents:
+                if parent is not None and old_parent.id == parent.id:
+                    # old parent also is new parent
+                    continue
+                try:
+                    # Has already been registered for updates
+                    old_parent = parent_containers.get_entity_by_id(old_parent.id)
+                except KeyError:
+                    parent_containers.append(old_parent)
+                old_parent.remove_value_from_property("Child container", int(
+                    row["BIS ID"]), remove_if_empty_afterwards=False)
+                if old_parent.get_property("Child container").value is None:
+                    old_parent.get_property("Child container").value = []
+
+        # Add custom label to child
+        child = child_containers.get_entity_by_id(id=int(row["BIS ID"]))
+        if _value_in_row("Custom label", row):
+            child.name = row["Custom label"]
+            if child.get_property(custom_label_prop.id) is None:
+                child.add_property(id=custom_label_prop.id,
+                                   name=custom_label_prop.name, value=row["Custom label"])
+            else:
+                child.get_property(custom_label_prop.id).value = row["Custom label"]
+
+        # Treat PI
+        if _value_in_row("PI", row):
+            pi = row["PI"]
+            pi_prop = cached_get_entity_by(query="FIND PROPERTY Pi")
+            try:
+                query = f"FIND RECORD Person WITH ID={int(pi)}"
+            except ValueError:
+                query = f"FIND RECORD Person WITH AN Abbreviation='{pi}'"
+            try:
+                pi_rec = cached_get_entity_by(query=query)
+                if child.get_property(pi_prop.name) is not None:
+                    child.get_property(pi_prop.name).value = pi_rec.id
+                else:
+                    child.add_property(id=pi_prop.id, name=pi_prop.name, value=pi_rec.id)
+            except db.EmptyUniqueQueryError:
+                logger.warning(f"There is no PI with BIS ID or abbreviation {pi}. Skipping.")
+
+        # Collection(s)
+        if _value_in_row("Collection", row):
+            collection_rt = cached_get_entity_by(query="FIND RECORDTYPE Collection")
+            if not ";" in str(row["Collection"]):
+                collections = [row["Collection"]]
+            else:
+                collections = [coll.strip() for coll in str(row["Collection"]).split(';')]
+            prop_val = []
+            for coll in collections:
+                try:
+                    query = f"FIND RECORD Collection WITH ID={int(coll)}"
+                except ValueError:
+                    query = f"FIND RECORD Collection WITH name='{coll}'"
+                try:
+                    coll_rec = cached_get_entity_by(query=query)
+                    prop_val.append(coll_rec.id)
+                except db.EmptyUniqueQueryError:
+                    logger.warning(f"There is no collection with name or BIS ID {coll}. Skipping.")
+                    continue
+            if prop_val:
+                if child.get_property("Collection") is not None:
+                    child.get_property("Collection").datatype = db.LIST("Collection")
+                    child.get_property("Collection").value = prop_val
+                else:
+                    child.add_property(id=collection_rt.id, name=collection_rt.name, datatype=db.LIST(
+                        "Collection"), value=prop_val)
+
+        # Treat Container Contents
+        if _value_in_row("Container Contents", row):
+            if not (_value_in_row("PI", row) and _value_in_row("Collection", row)):
+                logger.error(
+                    f"Container Contents are given for container {child.id} but it "
+                    "is missing PI and/or Collection info. No updates have been performed."
+                )
+                return 1
+            contents_prop = cached_get_entity_by(query="FIND PROPERTY 'Container Contents'")
+            if child.get_property(contents_prop.name) is not None:
+                child.get_property(contents_prop.name).value = row["Container Contents"]
+            else:
+                child.add_property(id=contents_prop.id, name=contents_prop.name,
+                                   value=row["Container Contents"])
+
+        # Treat PDF Report
+        if _value_in_row("PDFReport", row):
+            pdf_id = row["PDFReport"]
+            try:
+                pdf_id = int(pdf_id)
+                pdf_rec = cached_query(f"FIND FILE PDFReport WITH ID={pdf_id}")
+                if not pdf_rec:
+                    logger.warning(
+                        f"There is no PDFReport with BIS ID {pdf_id}, so no PDF is attached to container {child.id}.")
+                else:
+                    if child.get_property("PDFReport") is not None:
+                        child.get_property("PDFReport").value = pdf_id
+                    else:
+                        child.add_property(id=pdf_rt.id, name=pdf_rt.name, value=pdf_id)
+            except ValueError:
+                logger.warning(
+                    f"There is no valid BIS ID provided for container {child.id}. "
+                    f"Provided was {pdf_id}. Skipping.")
+
+    # This is a workaround for weird merging errors in the
+    # crawler. TODO(fspreck): Remove after merge of sync_node and sync_graph and
+    # following release.
+    merged = []
+    for par in parent_containers:
+        if (data['BIS ID'] == par.id).any():
+            # A container to be updated is used as another containers parent:
+            child = child_containers.get_entity_by_id(par.id)
+            # All parents have a 'Child container' property with a value (which may
+            # be empty). No child container has this property yet, so the following is
+            # okay without checks:
+            prop = par.get_property("Child container")
+            child.add_property(name=prop.name, id=prop.id, value=prop.value)
+            merged.append(par)
+    for par in merged:
+        # All relevant information, i.e. the new children, has been merged into
+        # the corresponding child entity, so drop this parent.
+        parent_containers.remove(par)
+    # TODO Add notes as CommentAnnotation
+    crawler = Crawler(securityMode=SecurityMode.UPDATE)
+    to_be_synchronized = child_containers + parent_containers
+
+    inserts, updates = crawler.synchronize(
+        commit_changes=True, unique_names=False, crawled_data=to_be_synchronized,
+        no_insert_RTs=get_do_not_insert_type_names()
+    )
+    if "SHARED_DIR" in os.environ:
+        _notify_about_inserts_and_updates(len(inserts), len(updates), htmluserlog_public,
+                                          crawler.run_id)
+    for ent in inserts + updates:
+        ent.retrieve_acl()
+        ent.grant(role='Stock Manager', priority=False, permission="EDIT:ACL")
+        ent.update_acl()
+    logger.info(f"Successfully processed {len(child_containers)} containers and "
+                f"{len(parent_containers)} parent containers.")
+
+    # TODO Create new Spreadsheet for download
+
+
+if __name__ == "__main__":
+    main()
diff --git a/sample-management-custom/caosdb-server/scripting/bin/upload_sample_template.py b/sample-management-custom/caosdb-server/scripting/bin/upload_sample_template.py
new file mode 100755
index 0000000000000000000000000000000000000000..f66744a9b118f90311718f802741f8e546e98e5c
--- /dev/null
+++ b/sample-management-custom/caosdb-server/scripting/bin/upload_sample_template.py
@@ -0,0 +1,342 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# This file is a part of the LinkAhead Project.
+#
+# Copyright (C) 2024 GEOMAR
+# Copyright (C) 2022 Jakob Eckstein
+# Copyright (C) 2024 Indiscale GmbH <info@indiscale.com>
+# Copyright (C) 2023 Henrik tom Wörden <h.tomwoerden@indiscale.com>
+# Copyright (C) 2024 Florian Spreckelsen <f.spreckelsen@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+
+import json
+import logging
+import os
+import pandas as pd
+import re
+import subprocess
+import sys
+
+from datetime import date, datetime
+from dateutil.relativedelta import relativedelta
+from pathlib import Path
+from tempfile import NamedTemporaryFile
+
+import linkahead as db
+
+from caosadvancedtools.datainconsistency import DataInconsistencyError
+from caosadvancedtools.serverside import helper
+from caosadvancedtools.table_importer import CSVImporter
+from caoscrawler.logging import configure_server_side_logging
+
+from bis_utils import (replace_entity_urls_by_ids,
+                       SPECIAL_TREATMENT_SAMPLE, whitespace_cleanup_in_df)
+
+# suppress warning of diff function
+apilogger = logging.getLogger("linkahead.apiutils")
+apilogger.setLevel(logging.ERROR)
+
+
+def semicolon_separated_list(text):
+    return [el.strip() for el in text.split(";") if el != ""]
+
+
+def _embargo_converter(text: str):
+
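+    # Illustrative behavior: "2025-01-31" -> date(2025, 1, 31);
+    # "yes"/"true" -> one year from today; "no"/"false" -> "" (no embargo).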
+    datepattern = r"^(?P<year>\d{4,4})-(?P<month>\d{2,2})-(?P<day>\d{2,2})"
+    matches = re.match(datepattern, str(text))
+    if matches:
+        return date(int(matches.groupdict()["year"]), int(matches.groupdict()["month"]), int(matches.groupdict()["day"]))
+    if f"{text}".lower() in ["true", "yes"]:
+        # yes means embargo until today in one year
+        return date.today() + relativedelta(years=1)
+    if f"{text}".lower() in ["false", "no"]:
+        return ""
+    raise ValueError(
+        f"The embargo should be either a date in YYYY-MM-DD format, or 'true'/'yes' or 'false'/'no', but is {text}.")
+
+
+ERROR_PREFIX = 'Something went wrong: '
+ERROR_SUFFIX = ' Please contact <a href="mailto:biosamples@geomar.de">biosamples@geomar.de</a> if you encounter this issue.'
+
+
+# Column datatypes
+DATATYPE_DEFINITIONS = {
+    "AphiaID": int,
+    "BIS ID": str,
+    "Campaign": str,
+    "Date collected start": str,
+    "Date collected stop": str,
+    "Date sampled start": str,
+    "Date sampled stop": str,
+    "Fixation": str,
+    "Gear configuration": str,
+    "Gear": str,
+    "Hol": int,
+    "Latitude start": float,
+    "Latitude stop": float,
+    "Longitude start": float,
+    "Longitude stop": float,
+    "Main User": str,
+    "Nagoya case number": str,
+    "PI": str,
+    "Parent BIS ID": str,
+    "Platform": str,
+    "Sample Context": str,
+    "Sample container": str,
+    "SampleType": str,
+    "SampleTypeSpecific": str,
+    "Sampling Person": str,
+    "Sampling depth start": float,
+    "Sampling depth stop": float,
+    "Sampling method": str,
+    "Station ID": str,
+    "Station number": str,
+    "Storage Container Label": str,
+    "Storage ID": str,
+    "StorageTemperature": str,
+    "Subevent": str,
+    "Time collected start": str,
+    "Time collected stop": str,
+    "Time sampled start": str,
+    "Time sampled stop": str,
+    "Timezone": str,
+    "Water depth start": float,
+    "Water depth stop": float,
+}
+
+# Obligatory columns: Must exist and must not be empty
+# Must exist
+OBLIGATORY_COLUMNS = [
+    "BIS ID",
+    "Collection",
+    "Date collected start",
+    "Fixation",
+    "Gear",
+    "Latitude start",
+    "Longitude start",
+    "Main User",
+    "Nagoya case number",
+    "PI",
+    "Sample Context",
+    "Sample container",
+    "SampleType",
+    "SampleTypeSpecific",
+    "Sphere",
+    "Storage ID",
+    "StorageTemperature",
+]
+
+OBLIGATORY_COLUMNS_CHILD = [
+    "BIS ID",
+    "Date sampled start",
+    "Fixation",
+    "Main User",
+    "Parent BIS ID",
+    "Sample Context",
+    "Sample container",
+    "SampleType",
+    "SampleTypeSpecific",
+    "Sphere",
+    "Storage ID",
+    "StorageTemperature",
+]
+
+COLUMN_CONVERTER = {
+    "Collection": semicolon_separated_list,
+    "Ecotaxa URL": semicolon_separated_list,
+    "NCBI Accession": semicolon_separated_list,
+    "NCBI BioProject": semicolon_separated_list,
+    "NCBI BioSample": semicolon_separated_list,
+    "OSIS URL": semicolon_separated_list,
+    "Embargo": _embargo_converter,
+    "Publications": semicolon_separated_list,
+    "Sphere": semicolon_separated_list,
+}
+
+logger = logging.getLogger("caosadvancedtools")
+
+
+def get_parser():
+    par = helper.get_argument_parser()
+    return par
+
+
+def _is_child_sample_table(filename):
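+    """A table counts as a child-sample table if its 'Parent BIS ID' column exists and is not entirely empty."""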
+    tmp_data = pd.read_csv(filename, sep=',')
+    if 'Parent BIS ID' in tmp_data.columns:
+        return not tmp_data["Parent BIS ID"].isnull().all()
+    return False
+
+
+def read_data_from_file(filename):
+    if _is_child_sample_table(filename):
+        oblig = OBLIGATORY_COLUMNS_CHILD
+    else:
+        oblig = OBLIGATORY_COLUMNS
+
+    table_importer = CSVImporter(
+        converters=COLUMN_CONVERTER,
+        obligatory_columns=oblig,
+        unique_keys=None,
+        datatypes=DATATYPE_DEFINITIONS,
+        existing_columns=oblig,
+    )
+    try:
+        df = table_importer.read_file(filename, sep=",", comment="#")
+    except TypeError as te:
+        logger.error(
+            f"There was a wrong datatype detected in your CSV: \n{te}\n"
+            "Please verify that all entries have the correct type, e.g., no floating "
+            "point numbers in integer columns or text in numeric columns."
+        )
+        raise DataInconsistencyError("There was a problem with the CSV upload.")
+
+    # strip leading and trailing whitespaces
+    return whitespace_cleanup_in_df(df)
+
+
+def _get_converter_from_property_datatype(dt):
+    if dt == db.TEXT:
+        return str
+    elif dt == db.REFERENCE:
+        return int
+    elif dt == db.DOUBLE:
+        return float
+    elif dt == db.INTEGER:
+        return int
+    elif dt == db.FILE:
+        return int
+    elif dt == db.DATETIME:
+        raise NotImplementedError()
+    elif dt == db.BOOLEAN:
+        return bool
+    else:
+        raise ValueError(f"Property has unknown datatype {dt}")
+
+
+def main():
+    parser = get_parser()
+    args = parser.parse_args()
+    # Check whether executed locally or as an SSS depending on
+    # auth_token argument.
+    if hasattr(args, "auth_token") and args.auth_token:
+        db.configure_connection(auth_token=args.auth_token)
+        userlog_public, htmluserlog_public, debuglog_public = configure_server_side_logging()
+    else:
+        rootlogger = logging.getLogger()
+        rootlogger.setLevel(logging.INFO)
+        logger.setLevel(logging.DEBUG)
+        handler = logging.StreamHandler(stream=sys.stdout)
+        handler.setLevel(logging.DEBUG)
+        rootlogger.addHandler(handler)
+        userlog_public = "/tmp/upload_sample_userlog.log"
+        htmluserlog_public = "/tmp/upload_sample_userlog.html"
+        debuglog_public = "/tmp/upload_sample_debuglog.html"
+
+    if hasattr(args, "filename") and args.filename:
+        if hasattr(args, "auth_token") and args.auth_token:
+            upload_dir = os.path.dirname((args.filename))
+            # Read the input from the form (form.json)
+            with open(args.filename) as form_json:
+                form_data = json.load(form_json)
+            # Read content of the uploaded file
+            path = os.path.join(upload_dir, form_data["sample_template_file"])
+        else:
+            path = args.filename
+
+        # Extend the converter lists by those properties that are unknown to this script but are
+        # Properties of Sample
+        sample = db.execute_query("FIND RECORDTYPE Sample", unique=True)
+        for eprop in sample.properties:
+            property_name = eprop.name
+            if property_name in SPECIAL_TREATMENT_SAMPLE:
+                continue
+            if db.apiutils.is_reference(eprop):
+                rt = db.get_entity_by_id(eprop.id)
+                if len(rt.properties) == 1:
+                    # Resolve single-property reference types via that property's datatype.
+                    converter = _get_converter_from_property_datatype(rt.properties[0].datatype)
+                elif len(rt.properties) == 0:
+                    converter = str
+                else:
+                    converter = None
+            else:
+                converter = _get_converter_from_property_datatype(eprop.datatype)
+            if converter is None:
+                continue
+            DATATYPE_DEFINITIONS[property_name] = converter
+            if sample.get_importance(property_name) == db.OBLIGATORY:
+                # This is only needed if the sample is not a child sample
+                OBLIGATORY_COLUMNS.append(property_name)
+        try:
+            data = read_data_from_file(path)
+            data = replace_entity_urls_by_ids(data, ["BIS ID", "Storage ID", "Parent BIS ID"])
+            pickle_out = NamedTemporaryFile(delete=False, suffix=".pkl")
+            data.to_pickle(pickle_out.name)
+        except DataInconsistencyError as err:
+            # DataInconsistencyError is logged elsewhere
+            logger.error(f"Sample upload failed.\n{err}")
+            return 1
+
+        parent_path = Path(__file__).parent
+        db.Info()  # call db.Info to refresh the auth_token, just to be sure.
+        conn = db.get_connection()
+        auth_token_callee = conn._authenticator.auth_token
+
+        cmds = [
+            str(parent_path / "crawl_sample_data_async.py"),
+            "--auth-token",
+            auth_token_callee,
+            args.filename,
+            pickle_out.name,
+            Path(path).name
+        ]
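+        # Resulting call (sketch): crawl_sample_data_async.py --auth-token <token>
+        # <form json> <pickled dataframe> <uploaded csv file name>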
+
+        myenv = os.environ.copy()
+        myenv["HOME"] = str(parent_path.parent / "home")
+
+        # For a few samples, we can run the upload directly and give
+        # user output. More samples will be run asynchronously.
+        if data.shape[0] < 20:
+            p = subprocess.Popen(cmds, start_new_session=False, env=myenv,
+                                 cwd=parent_path, stdout=subprocess.PIPE,
+                                 stderr=subprocess.PIPE)
+            stdout, stderr = p.communicate()
+            if p.returncode == 0:
+                # We know that the stdout will be formatted by the
+                # serverside logging helper, so we don't wrap it but
+                # just print it.
+                print(stdout.decode())
+            else:
+                logger.error(stderr.decode())
+                print(stdout.decode())
+        else:
+            logger.info(
+                "Starting sample upload in the background. This may take a while. "
+                "You will be notified by email when it has finished."
+            )
+            p = subprocess.Popen(cmds, start_new_session=True, env=myenv,
+                                 cwd=parent_path)
+
+    else:
+        msg = "{}upload_sample_template.py was called without the JSON file in args.{}".format(
+            ERROR_PREFIX, ERROR_SUFFIX)
+        logger.error(msg)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/sample-management-custom/caosdb-server/scripting/home/identifiables.yml b/sample-management-custom/caosdb-server/scripting/home/identifiables.yml
new file mode 100644
index 0000000000000000000000000000000000000000..f37c5c380ca38a583d74a8f09314f3850b810e47
--- /dev/null
+++ b/sample-management-custom/caosdb-server/scripting/home/identifiables.yml
@@ -0,0 +1,40 @@
+NagoyaCase:
+  - 'Nagoya case number'
+Device:
+  - name
+SourceEvent:
+  - Gear
+  - Time start
+  - Position
+Gear:
+  # TODO?
+  - name
+Fixation:
+  # TODO?
+  - enumValue
+Platform:
+  # TODO?
+  - name
+FunctionalGroup:
+  # TODO?
+  - name
+SampleType:
+  # TODO?
+  - name
+Collection:
+  - name
+SampleTypeSpecific:
+  # TODO?
+  - name
+Sphere:
+  - name
+StartPosition:
+  - Latitude
+  - Longitude
+  - Water depth
+  - Sampling depth
+StopPosition:
+  - Latitude
+  - Longitude
+  - Water depth
+  - Sampling depth