chore(code): Create globals singleton

Marty Oehme 2024-07-15 20:42:57 +02:00
parent d2c25a9033
commit 0d05ed981a
Signed by: Marty
GPG key ID: EDBF2ED917B2EF6A
4 changed files with 19 additions and 13 deletions
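The path constants move into a single shared module, so consuming scripts and documents only need one import. A rough sketch of the resulting usage pattern (not part of the diff; the query.txt read simply mirrors the changed documents below):

import src.globals as g

# all data-directory paths are now defined once in src/globals.py
print(g.RAW_DATA)  # <QUARTO_PROJECT_DIR>/02-data/raw

# e.g. loading the supplementary search query
with open(f"{g.SUPPLEMENTARY_DATA}/query.txt") as f:
    query = f.read()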


@@ -9,15 +9,10 @@ from matplotlib import pyplot as plt
 import seaborn as sns
 from tabulate import tabulate
 import bibtexparser
+import src.globals as g
 sns.set_style("whitegrid")
-PROJECT_DIR=Path(os.getenv("QUARTO_PROJECT_DIR", "."))
-DATA_DIR=PROJECT_DIR.joinpath("02-data")
-RAW_DATA=DATA_DIR.joinpath("raw")
-WORKING_DATA=DATA_DIR.joinpath("intermediate")
-PROCESSED_DATA=DATA_DIR.joinpath("processed")
-SUPPLEMENTARY_DATA=DATA_DIR.joinpath("supplementary")
 ## Creates 3 important data structures:
 # df: The main dataframe containing all final sample studies
 # df_by_intervention: The same dataframe but split up by individual interventions per study
@@ -26,18 +21,18 @@ SUPPLEMENTARY_DATA=DATA_DIR.joinpath("supplementary")
 from src.process import add_metadata as meta
 # raw database-search results
-bib_sample_raw_db = prep_data.bib_library_from_dir(RAW_DATA)
+bib_sample_raw_db = meta.bib_library_from_dir(g.RAW_DATA)
 # the complete library of sampled (and working) literature
-bib_sample = prep_data.bib_library_from_dir(WORKING_DATA)
+bib_sample = meta.bib_library_from_dir(g.WORKING_DATA)
 # load relevant studies
 from src.extract import load_data as load
 # each observation in a single dataframe
-    raw_observations = load_data.from_yml(PROCESSED_DATA),
-    study_metadata = prep_data.bib_metadata_df(bib_sample),
-    country_groups = prep_data.country_groups_df(Path(f"{SUPPLEMENTARY_DATA}/wb-country-groupings.xlsx")),
+df = meta.observations_with_metadata_df(
+    raw_observations = load.from_yml(g.PROCESSED_DATA),
+    study_metadata = meta.bib_metadata_df(bib_sample),
+    country_groups = meta.country_groups_df(Path(f"{g.SUPPLEMENTARY_DATA}/wb-country-groupings.xlsx")),
 )
 # all observations but split per individual intervention


@@ -749,7 +749,7 @@ Internal validity ranking. Adapted from @Maitrot2017.
 #| label: full-search-query
 #| echo: false
 #| output: asis
-with open(f"{SUPPLEMENTARY_DATA}/query.txt") as f:
+with open(f"{g.SUPPLEMENTARY_DATA}/query.txt") as f:
     query = f.read()
 t3 = "`" * 3


@@ -1700,7 +1700,7 @@ while relying on indicators for measurement which are flexible yet overlapping e
 #| label: full-search-query
 #| echo: false
 #| output: asis
-with open(f"{SUPPLEMENTARY_DATA}/query.txt") as f:
+with open(f"{g.SUPPLEMENTARY_DATA}/query.txt") as f:
     query = f.read()
 t3 = "`" * 3

src/globals.py (new file, 11 lines added)

@@ -0,0 +1,11 @@
+from pathlib import Path
+import os
+PROJECT_DIR=Path(os.getenv("QUARTO_PROJECT_DIR", "."))
+DATA_DIR=PROJECT_DIR.joinpath("02-data")
+RAW_DATA=DATA_DIR.joinpath("raw")
+WORKING_DATA=DATA_DIR.joinpath("intermediate")
+PROCESSED_DATA=DATA_DIR.joinpath("processed")
+SUPPLEMENTARY_DATA=DATA_DIR.joinpath("supplementary")
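
Side note on the "singleton" in the commit title: Python caches imported modules in sys.modules, so the module-level constants in src/globals.py are created once and shared by every importer. A minimal illustration of that behaviour (assumes the standard CPython import machinery; not part of the diff):

import importlib
import sys

import src.globals as g

# repeated imports return the exact same module object
assert sys.modules["src.globals"] is g
assert importlib.import_module("src.globals") is g

# hence every consumer sees the same Path objects, e.g.
print(g.DATA_DIR)  # <QUARTO_PROJECT_DIR>/02-data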