
fix url formatting

Merged · Florian Spreckelsen requested to merge f-fix-url-formatting into dev
All threads resolved!

Files changed: +11 −8
@@ -39,6 +39,7 @@ import sys
 import traceback
 import uuid
 import warnings
 
 from argparse import RawTextHelpFormatter
 from copy import deepcopy
 from datetime import datetime
@@ -72,6 +73,7 @@ from .scanner import (create_converter_registry, initialize_converters,
 from .stores import GeneralStore
 from .structure_elements import StructureElement
 from .sync_graph import SyncGraph
+from .utils import get_shared_resource_link
 
 logger = logging.getLogger(__name__)
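
The imported helper lives in .utils, whose implementation is not part of this diff. A minimal sketch of what get_shared_resource_link plausibly does, with all details assumed rather than taken from the MR:

    # Hypothetical sketch -- the real helper is in .utils and is not shown in this diff.
    from urllib.parse import quote

    def get_shared_resource_link(host_url: str, filename: str) -> str:
        """Build "<host_url>/Shared/<filename>" without doubling slashes."""
        base = host_url.rstrip("/")  # tolerate a trailing "/" in the configured URL
        return f"{base}/Shared/{quote(filename)}"  # quote the filename for URL safety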
@@ -750,16 +752,17 @@ one with the entities that need to be updated and the other with entities to be
     # Sending an Email with a link to a form to authorize updates is
     if get_config_setting("send_crawler_notifications"):
         filename = OldCrawler.save_form([el[3] for el in pending_changes], path, run_id)
-        text = """Dear Curator,
+        link_address = get_shared_resource_link(db.configuration.get_config()[
+            "Connection"]["url"], filename)
+        changes = "\n".join([el[3] for el in pending_changes])
+        text = f"""Dear Curator,
 there where changes that need your authorization. Please check the following
 carefully and if the changes are ok, click on the following link:
 
-{url}/Shared/{filename}
+{link_address}
 
 {changes}
-""".format(url=db.configuration.get_config()["Connection"]["url"],
-           filename=filename,
-           changes="\n".join([el[3] for el in pending_changes]))
+"""
         try:
             fro = get_config_setting("sendmail_from_address")
             to = get_config_setting("sendmail_to_address")
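
Besides swapping .format() for an f-string, the rewrite routes the link through the helper. The likely motivation, inferred from the MR title and illustrated with made-up values: plain concatenation with "/Shared/" yields malformed URLs when the configured base URL already ends in a slash:

    # Hypothetical illustration of the bug class this MR addresses:
    url = "https://caosdb.example.com/"  # assumed config value with trailing slash
    print(url + "/Shared/" + "form.html")
    # -> https://caosdb.example.com//Shared/form.html  (double slash)
    print(get_shared_resource_link(url, "form.html"))
    # -> https://caosdb.example.com/Shared/form.html   (with the sketch above)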
@@ -899,7 +902,7 @@ the CaosDB Crawler successfully crawled the data and
     if get_config_setting("create_crawler_status_records"):
         text += ("You can checkout the CrawlerRun Record for more information:\n"
                  f"{domain}/Entity/?P=0L10&query=find%20crawlerrun%20with%20run_id=%27{run_id}%27\n\n")
-    text += (f"You can download the logfile here:\n{domain}/Shared/" + logfile)
+    text += (f"You can download the logfile here:\n{get_shared_resource_link(domain, logfile)}")
     send_mail(
         from_addr=get_config_setting("sendmail_from_address"),
         to=get_config_setting("sendmail_to_address"),
@@ -1059,7 +1062,7 @@ def crawler_main(crawled_directory_path: str,
         userlog_public, htmluserlog_public, debuglog_public = configure_server_side_logging()
         # TODO make this optional
         _create_status_record(
-            get_config_setting("public_host_url") + "/Shared/" + htmluserlog_public,
+            get_shared_resource_link(get_config_setting("public_host_url"), htmluserlog_public),
             crawler.run_id)
     else:  # setup stdout logging for other cases
         root_logger = logging.getLogger()
@@ -1128,7 +1131,7 @@ def crawler_main(crawled_directory_path: str,
         # pylint: disable=E0601
         domain = get_config_setting("public_host_url")
         logger.error("Unexpected Error: Please tell your administrator about this and provide "
-                     f"the following path.\n{domain}/Shared/" + debuglog_public)
+                     f"the following path.\n{get_shared_resource_link(domain, debuglog_public)}")
         _update_status_record(crawler.run_id, 0, 0, status="FAILED")
         return 1
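
With the sketch above, a quick check that all call sites now yield the same link shape whether or not public_host_url is configured with a trailing slash (example values are made up):

    assert get_shared_resource_link("https://caosdb.example.com", "log.txt") \
        == "https://caosdb.example.com/Shared/log.txt"
    assert get_shared_resource_link("https://caosdb.example.com/", "log.txt") \
        == "https://caosdb.example.com/Shared/log.txt"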