diff --git a/sample-management-custom/caosdb-server/caosdb-webui/src/ext/js/ext_samplemanagement.js b/sample-management-custom/caosdb-server/caosdb-webui/src/ext/js/ext_samplemanagement.js
index 92a854ec460bf1bc17b50a72fd9f54e53666f613..ba4dcb4809afc498c8ab49baaeed2ebdda7ff130 100644
--- a/sample-management-custom/caosdb-server/caosdb-webui/src/ext/js/ext_samplemanagement.js
+++ b/sample-management-custom/caosdb-server/caosdb-webui/src/ext/js/ext_samplemanagement.js
@@ -94,7 +94,7 @@ const ext_samplemanagement = function($, navbar, log, form_elements, form_panel,
         script: "export_sample_csv.py",
         fields: [{
             type: "text",
-            name: "bis_ids",
+            name: "ids",
             label: sample_id_label,
             required: false,
             pattern: "((([0-9]+\\s*-\\s*){0,1}[0-9]+)\\s*,\\s*)*(([0-9]+\\s*-\\s*){0,1}[0-9]+)",
diff --git a/sample-management-custom/caosdb-server/scripting/bin/export_sample_csv.py b/sample-management-custom/caosdb-server/scripting/bin/export_sample_csv.py
index 4b79fdd81e1af2ea56b770e83559423f2423a090..41ca25f59f281b514c74a94dab87c3de30453c71 100755
--- a/sample-management-custom/caosdb-server/scripting/bin/export_sample_csv.py
+++ b/sample-management-custom/caosdb-server/scripting/bin/export_sample_csv.py
@@ -25,33 +25,28 @@ import logging
 import os
 import sys
 import urllib
-from datetime import date, datetime
-from typing import List
 
 import linkahead as db
 import pandas as pd
-from caosadvancedtools.datainconsistency import DataInconsistencyError
 from caosadvancedtools.serverside import helper
 from caosadvancedtools.table_export import BaseTableExporter
-from caosadvancedtools.table_importer import CSVImporter
-from caoscrawler import Crawler, SecurityMode
-from caoscrawler.crawl import ForbiddenTransaction
-from caoscrawler.identifiable_adapters import CaosDBIdentifiableAdapter
+from caoscrawler.config import get_config_setting
 from caoscrawler.logging import configure_server_side_logging
 from linkahead.cached import cached_get_entity_by, cached_query as cquery
 from linkahead.common.datatype import get_id_of_datatype
 from linkahead.exceptions import (EmptyUniqueQueryError, QueryNotUniqueError,
                                   TransactionError)
-from dateutil import parser as dateparser
-from dateutil.parser import isoparse
 
 from bis_utils import (create_email_with_link_text,
                        get_description_row, get_email_from_username,
-                       get_options_row, send_mail_with_defaults,
-                       SPECIAL_TREATMENT_SAMPLE as SPECIAL_TREATMENT)
+                       get_options_row, send_mail_with_defaults)
 from export_container_csv import (generate_label_text,
                                   extract_storage_chain as container_storage_chain)
-from upload_sample_template import DATATYPE_DEFINITIONS
+from sample_helpers.sample_upload_column_definitions import (
+    DATATYPE_DEFINITIONS, SPECIAL_TREATMENT_SAMPLE as SPECIAL_TREATMENT)
+from sample_helpers.utils import (CONSTANTS, get_column_header_name,
+                                  get_entity_name)
+
 
 # suppress warning of diff function
 apilogger = logging.getLogger("linkahead.apiutils")
@@ -59,8 +54,8 @@ apilogger.setLevel(logging.ERROR)
 
 logger = logging.getLogger("caosadvancedtools")
 
-ERROR_PREFIX = 'Something went wrong: '
-ERROR_SUFFIX = ' Please conatct <a href="mailto:biosamples@geomar.de">biosamples@geomar.de</a> if you encounter this issue.'
+ERROR_PREFIX = CONSTANTS["error_prefix"]
+ERROR_SUFFIX = CONSTANTS["error_suffix"]
 
 
 def cached_record(i):
@@ -283,7 +278,7 @@ def extract_hol(record, key):
 
 def extract_bis_url(record, key):
     # base_uri = db.get_config().get("Connection", "url")
-    base_uri = "https://biosamples.geomar.de/"
+    base_uri = get_config_setting("public_host_url")
     return urllib.parse.urljoin(base_uri, f"Entity/{record.id}")
 
 
@@ -336,7 +331,7 @@ def extract_event_url(record, key):
     if not events:
         return None
     if len(events) == 1:
-        return urllib.parse.urljoin("https://biosamples.geomar.de", f"Entity/{events[0].id}")
+        return urllib.parse.urljoin(get_config_setting("public_host_url"), f"Entity/{events[0].id}")
     logger.debug(f"Sample {record.id} has multiple events.")
     return None
 
@@ -347,10 +342,6 @@ EXTRACTORS = {
     "Parent BIS ID": extract_parent_sample,
     "AphiaID": default_find,
     "Collection": extract_reference_name,
-    "Date collected start": extract_date,
-    "Date collected stop": extract_date,
-    "Date sampled start": extract_date,
-    "Date sampled stop": extract_date,
     "Main User": extract_person,
     "Sampling Person": extract_person,
     "PI": extract_person,
@@ -402,14 +393,14 @@ IGNORE_KEYS = [
 
 # Additional list of keys to be ignored when extracting parent sample information
 IGNORE_KEYS_PARENT = IGNORE_KEYS + [
-    "BIS ID",
+    "LinkAhead ID",
 ]
 
 # List of columns to be exported although they are not known to or ignored by
 # the import.
 ADDITIONAL_EXPORTS = [
-    "BIS URL",
-    "Parent BIS ID",
+    "LinkAhead URL",
+    "Parent LinkAhead ID",
     "Storage chain",
 ]
 
@@ -540,25 +531,25 @@ def to_csv(samples):
 def retrieve_samples(data):
     container = []
     not_found = []
-    for bis_id in data:
-        if isinstance(bis_id, int):
+    for eid in data:
+        if isinstance(eid, int):
             try:
                 container.append(
-                    cached_get_entity_by(query=f"FIND RECORD SAMPLE WITH id='{bis_id}'"))
+                    cached_get_entity_by(query=f"FIND RECORD SAMPLE WITH id='{eid}'"))
             except EmptyUniqueQueryError as e:
                 # we want to warn about these
-                not_found.append(bis_id)
+                not_found.append(eid)
         else:
             found_at_least_one_in_range = False
-            for next_bis_id in bis_id:
+            for next_eid in eid:
                 try:
                     container.append(
-                        cached_get_entity_by(query=f"FIND RECORD Sample WITH id='{next_bis_id}'"))
+                        cached_get_entity_by(query=f"FIND RECORD Sample WITH id='{next_eid}'"))
                     found_at_least_one_in_range = True
                 except EmptyUniqueQueryError as e:
                     pass
             if not found_at_least_one_in_range:
-                not_found.append(f"{bis_id.start}-{bis_id.stop-1}")
+                not_found.append(f"{eid.start}-{eid.stop-1}")
     return container, not_found
 
 
@@ -626,19 +617,6 @@ def main():
                 logger.info("Removing empty columns from export")
                 no_empty_columns = True
 
-            if "from_date" in form_data:
-                # Inserted after ...
-                data = [el.id for el in db.execute_query(
-                    "SELECT id FROM sample WHICH REFERENCES A SourceEvent "
-                    "WHICH HAS AN IGSN AND "
-                    f"(WHICH WAS INSERTED SINCE {form_data['from_date']})")
-                ]
-                # ... + update after
-                data += [el.id for el in db.execute_query(
-                    "SELECT id FROM sample WHICH REFERENCES A SourceEvent "
-                    "WHICH HAS AN IGSN AND "
-                    f"(WHICH WAS UPDATED SINCE {form_data['from_date']})")
-                ]
-            elif "query_string" in form_data and form_data["query_string"]:
+            if "query_string" in form_data and form_data["query_string"]:
                 query_string = form_data["query_string"]
                 if not query_string.lower().startswith("find ") and not query_string.lower().startswith("select "):
@@ -657,11 +635,11 @@ def main():
                     )
                     return
             else:
-                if not form_data["bis_ids"]:
+                if not form_data["ids"]:
                     logger.error(
                         "Please specify the samples to be exported either by query or by id(s).")
                     return
-                tmp = form_data["bis_ids"].split(",")
+                tmp = form_data["ids"].split(",")
                 data = []
                 for d in tmp:
                     if "-" in d: