Skip to main content

easyfabric.data.configmanager

json

logging

random

re

string

dataclass

datetime

timezone

Optional

notebookutils

DataClassFromDictMixin

check_format

read_file_as_string

yaml_to_json

to_snake_case

def to_snake_case(string: str) -> str

get_current_datetime

def get_current_datetime()

get_current_date

def get_current_date()

get_current_time

def get_current_time()

get_random_string

def get_random_string()

initialize_config

def initialize_config(file_path: str = "Files/Configuration/config.yaml")

get_config

def get_config()

Model Objects

@dataclass
class Model(DataClassFromDictMixin)

model

tabularname

compatibilitylevel

workspace

addmembers

databaseschema

defaultpowerbidatasourceversion

logintype

partitiondeployment

roledeployment

lakehouse

sourcequeryculture

modelfolder

modelfile

datasourcetype

datasourcename

dataset

tabularsystemfolder

measuretemplatefile

formatstringfile

Backup Objects

@dataclass
class Backup(DataClassFromDictMixin)

backupname

layer

storageaccount

container

tablefolder

enabled

exceptfolders

schema

Lakehouse Objects

@dataclass
class Lakehouse(DataClassFromDictMixin)

layer

useschema

workspace

workspaceid

lakehouse

mountpoint

abfspath

mountpath

defaultschema

set_mount_point

def set_mount_point()

get_schema

def get_schema()

ConfigManager Objects

@dataclass
class ConfigManager(DataClassFromDictMixin)

tenantid

applicationclientid

appsecretvalue

keyvault

batch_id

stop_at_error

verboselogging

run_prebronzenotebook

run_postbronzenotebook

run_presilvernotebook

run_postsilvernotebook

refresh_after_load

process_sample_rows

config_file

metafolder

columnnamestolower

addmembers

applicationkeyvaultsecret

bimfile

bimfolder

compatibilitylevel

connectionstring

culture

databaseschema

datasourcename

defaultdatasource

defaultpowerbidatasourceversion

derivedmodelfile

dimobjectprefix

dimbusinesskeyprefix

dimsurrogatekeyprefix

silverprimarykeyseparator

surrogatekeyunknownvalue

factobjectprefix

objectprimarykeycolumn

objecttimestampcolumn

objectpartitioncolumn

objectsourcetagcolumn

silverconvertedpostfix

silverconvertedcolumn

formatstring_file

jsonfolder

measurefolder

measuretemplate_file

modelfile

modelfolder

rolefile

sourcequeryculture

sqlfolder

tabularhiddenfolder

tabularname

tabularsystemfolder

transformationobjectprefix

yamlfolder

bronzeconnectionprefix

bronzekeepsourcecolumnnames

bronzetableprefix

defaultseparator

bronzefolder

lakehouses

models

backups

historytablepostfix

schemahistory

silverloadretry

silverloaddelay

notebooktimeout

__post_init__

def __post_init__()

from_yaml

@classmethod
def from_yaml(cls, yaml_string: str)

Create a ConfigManager instance from a YAML string.

from_yaml_file

@classmethod
def from_yaml_file(cls, file_path: str = "Files/Configuration/config.yaml")

Create a ConfigManager instance from a YAML file.

__new__

def __new__(cls, *args, **kwargs)

skip_notebookprebronze

def skip_notebookprebronze()

set_stop_at_error

def set_stop_at_error()

set_refresh_after_load

def set_refresh_after_load(refresh: bool)

set_secret_value

def set_secret_value(secret_value: str)

set_custom_batchid

def set_custom_batchid(custom_batch_id: str)

set_sample_rows

def set_sample_rows(rows: int)

get_keyvault_info

def get_keyvault_info()

get_bronze_lakehouse

def get_bronze_lakehouse()

get_bronze_lakehouse_name

def get_bronze_lakehouse_name()

get_lakehouse_by_layer

def get_lakehouse_by_layer(_layer: str) -> Lakehouse

get_workspace_by_layer

def get_workspace_by_layer(_layer: str)

get_model

def get_model(model_name: str) -> Model

Get a model by name from the models list.

Arguments:

  • model_name str - Name of the model to retrieve

Returns:

  • Model - The model object matching the given name

Raises:

  • Exception - If the model is not found in configuration

get_paths_by_layer

def get_paths_by_layer(_layer: str)

set_mountpoints

def set_mountpoints()

log_dataframe_schema

def log_dataframe_schema(df) -> None

Logs the schema of a Spark DataFrame at debug level in pretty-printed JSON format.

:param df: Spark DataFrame

log_verbose_message

def log_verbose_message(message: str) -> None

Logs the message at debug level, or at info level if verbose logging is enabled.

:param message: string