diff --git a/src/caoscrawler/conv_impl/spss.py b/src/caoscrawler/conv_impl/spss.py
index 2b11ebe080cf6fa0adf1fec23df6399a0f3bc2a0..824d4f8eb27f5adf63d14cc045e53f788cae88f9 100644
--- a/src/caoscrawler/conv_impl/spss.py
+++ b/src/caoscrawler/conv_impl/spss.py
@@ -28,6 +28,7 @@ import yaml
 from .. import converters
 from ..stores import GeneralStore
 from ..structure_elements import (File, StructureElement)
+from typing import Optional
 
 
 TYPES = {
@@ -56,7 +57,7 @@ class SPSSConverter(converters.TableConverter):
         return self._children_from_dataframe(df)
 
 
-def spss_to_yaml(savfile: str, yamlfile: str, cfood: str = None) -> None:
+def spss_to_yaml(savfile: str, yamlfile: str, cfood: Optional[str] = None) -> None:
     """Parse the *.sav and create basic datamodel in ``yamlfile``.
 
 Parameters
diff --git a/src/caoscrawler/crawl.py b/src/caoscrawler/crawl.py
index 53172dd2d99c3ab6b317083b899871e978be4672..6fc90e300c422efe637c0c4049443a94c34145d7 100644
--- a/src/caoscrawler/crawl.py
+++ b/src/caoscrawler/crawl.py
@@ -1307,7 +1307,7 @@ ____________________\n""".format(i + 1, len(pending_changes)) + str(el[3]))
             res[converter.name]["subtree"][k[0]] = d[k[0]]
         return res
 
-    def save_debug_data(self, filename: str, debug_tree: DebugTree = None):
+    def save_debug_data(self, filename: str, debug_tree: Optional[DebugTree] = None):
         """
         Save the information contained in a debug_tree to a file named filename.
         """
diff --git a/src/caoscrawler/hdf5_converter.py b/src/caoscrawler/hdf5_converter.py
index 5b1ff5775fb74919c989507c449636fd822db7f0..482d59c12d2d0b8540c01bd04da718d9c514ddc4 100644
--- a/src/caoscrawler/hdf5_converter.py
+++ b/src/caoscrawler/hdf5_converter.py
@@ -18,6 +18,8 @@
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
 #
 
+from typing import Optional
+
 try:
     import h5py
 except ModuleNotFoundError:
@@ -94,8 +96,8 @@ def convert_h5_element(elt: Union[h5py.Group, h5py.Dataset], name: str):
     raise ValueError("The given element must be either a HDF5 Group or Dataset object.")
 
 
-def convert_basic_element_with_nd_array(value, name: str = None,
-                                        internal_path: str = None, msg_prefix: str = ""):
+def convert_basic_element_with_nd_array(value, name: Optional[str] = None,
+                                        internal_path: Optional[str] = None, msg_prefix: str = ""):
     """Convert a given object either to an ndarray structure element or to a
     basic scalar structure element.
 
diff --git a/src/caoscrawler/identifiable.py b/src/caoscrawler/identifiable.py
index cefdf4a0f42b1f610e0712fdefebc2dc3b78d69f..e69e1092950f5d24e5be31f2f74cf3a6302512c5 100644
--- a/src/caoscrawler/identifiable.py
+++ b/src/caoscrawler/identifiable.py
@@ -24,7 +24,7 @@ import linkahead as db
 from datetime import datetime
 import json
 from hashlib import sha256
-from typing import Union
+from typing import Optional, Union
 import logging
 
 logger = logging.getLogger(__name__)
@@ -50,9 +50,10 @@ class Identifiable():
     backrefs: list, TODO future
     """
 
-    def __init__(self, record_id: int = None, path: str = None, record_type: str = None,
-                 name: str = None, properties: dict = None,
-                 backrefs: list[Union[int, str]] = None):
+    def __init__(self, record_id: Optional[int] = None, path: Optional[str] = None,
+                 record_type: Optional[str] = None,
+                 name: Optional[str] = None, properties: Optional[dict] = None,
+                 backrefs: Optional[list[Union[int, str]]] = None):
         if (record_id is None and path is None and name is None
                 and (backrefs is None or len(backrefs) == 0)
                 and (properties is None or len(properties) == 0)):
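
Every hunk in this patch applies the same pattern: parameters that default to None get an explicit Optional[...] annotation instead of the implicit-Optional shorthand (e.g. ``cfood: str = None``), which PEP 484 discourages and which newer mypy releases flag by default. A minimal before/after sketch, reusing the ``spss_to_yaml`` signature from above; the body is a hypothetical placeholder, not the real implementation:

    from typing import Optional

    # Before (implicit Optional): the annotation claims `str`, yet the default is None.
    #     def spss_to_yaml(savfile: str, yamlfile: str, cfood: str = None) -> None: ...
    # mypy with no_implicit_optional (the default in recent versions) reports this as an error.

    # After (explicit Optional): the annotation states that None is an accepted value.
    def spss_to_yaml_sketch(savfile: str, yamlfile: str, cfood: Optional[str] = None) -> None:
        """Hypothetical stand-in; only the signature mirrors the patched function."""
        if cfood is None:
            cfood = yamlfile + ".cfood.yml"  # assumed fallback, for illustration only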