From 1313b82f6f83f08e91e80da1acac33f2da219f35 Mon Sep 17 00:00:00 2001 From: Michael Date: Wed, 4 Sep 2024 22:09:51 +0300 Subject: [PATCH 01/18] dynamically generate readme with function list categorized --- .github/workflows/_generate_readme.py | 255 ++++++++++++++++++++++ README.md | 290 ++++++++++++++++++++++++++ src/sempy_labs/_model_bpa.py | 2 +- 3 files changed, 546 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/_generate_readme.py diff --git a/.github/workflows/_generate_readme.py b/.github/workflows/_generate_readme.py new file mode 100644 index 00000000..7965a2d7 --- /dev/null +++ b/.github/workflows/_generate_readme.py @@ -0,0 +1,255 @@ +import inspect +import os +from collections import defaultdict +import sempy_labs +import sempy_labs.migration +import sempy_labs.report +import sempy_labs.directlake +from sempy_labs.tom import TOMWrapper +import sempy_labs.lakehouse + +dirs = { + sempy_labs: 'labs', + sempy_labs.directlake: 'directlake', + sempy_labs.lakehouse: 'lake', + sempy_labs.migration: 'migration', + sempy_labs.report: 'rep', + TOMWrapper: 'tom', +} + +markdown_content = """ +# Semantic Link Labs + +[![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs) +[![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.7.2&style=flat)](https://readthedocs.org/projects/semantic-link-labs/) +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) +[![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs) + +--- +[Read the documentation on ReadTheDocs!](https://semantic-link-labs.readthedocs.io/en/stable/) +--- + +This is a python library intended to be used in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). 
This library was originally intended to solely contain functions used for [migrating semantic models to Direct Lake mode](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration). However, it quickly became apparent that functions within such a library could support many other useful activities in the realm of semantic models, reports, lakehouses and really anything Fabric-related. As such, this library contains a variety of functions ranging from running [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_vertipaq_analyzer) or the [Best Practice Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa) against a semantic model to seeing if any [lakehouse tables hit Direct Lake guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables) or accessing the [Tabular Object Model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html) and more! + +Instructions for migrating import/DirectQuery semantic models to Direct Lake mode can be found [here](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration). + +If you encounter any issues, please [raise a bug](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=). + +If you have ideas for new features/functions, please [request a feature](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=feature_request.md&title=). 
+ +## Install the library in a Fabric notebook +```python +%pip install semantic-link-labs +``` + +## Once installed, run this code to import the library into your notebook +```python +import sempy_labs as labs +from sempy_labs import migration, directlake +from sempy_labs import lakehouse as lake +from sempy_labs import report as rep +from sempy_labs.tom import connect_semantic_model +``` + +## Load semantic-link-labs into a custom [Fabric environment](https://learn.microsoft.com/fabric/data-engineering/create-and-use-environment) +An even better way to ensure the semantic-link-labs library is available in your workspace/notebooks is to load it as a library in a custom Fabric environment. If you do this, you will not have to run the above '%pip install' code every time in your notebook. Please follow the steps below. + +#### Create a custom environment +1. Navigate to your Fabric workspace +2. Click 'New' -> More options +3. Within 'Data Science', click 'Environment' +4. Name your environment, click 'Create' + +#### Add semantic-link-labs as a library to the environment +1. Within 'Public libraries', click 'Add from PyPI' +2. Enter 'semantic-link-labs'. +3. Click 'Save' at the top right of the screen +4. Click 'Publish' at the top right of the screen +5. Click 'Publish All' + +#### Update your notebook to use the new environment (*must wait for the environment to finish publishing*) +1. Navigate to your Notebook +2. 
Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook + + + +""" + + +link_prefix = "https://semantic-link-labs.readthedocs.io/en/stable/" +skip_functions = ['connect_semantic_model', '__init__', 'close'] +functions = {} +for d, d_alias in dirs.items(): + d_name = d.__name__ + for attr_name in dir(d): + attr = getattr(d, attr_name) + if inspect.isfunction(attr): + if attr_name not in skip_functions: + category = 'General' + if d_alias == 'migration': + category = 'Direct Lake Migration' + elif d_alias == 'rep': + category = 'Report' + elif d_alias == 'tom': + category = 'Tabular Object Model (TOM)' + elif d_alias == 'directlake' or 'direct' in attr_name: + category = 'Direct Lake' + elif '_git' in attr_name: + category = 'Git' + elif 'deployment_pipeline' in attr_name: + category = 'Deployment Pipelines' + elif d_alias == 'lake' or 'shortcut' in attr_name or 'lake' in attr_name: + category = 'Lakehouse' + elif 'qso' in attr_name: + category = 'Query Scale Out' + elif attr_name.startswith('resolve_') or attr_name in ['save_as_delta_table', 'create_abfss_path', 'create_relationship_name', 'generate_embedded_filter', 'format_dax_object_name', 'get_capacity_name', 'get_capacity_id']: + category = 'Helper Functions' + elif 'semantic' in attr_name or 'measure_depend' in attr_name or 'dataset' in attr_name or 'evaluate_dax' in attr_name or attr_name in ['clear_cache', 'get_model_calc_dependencies', 'get_object_level_security']: + category = 'Semantic Model' + elif 'bpa' in attr_name or 'vertipaq' in attr_name: + category = 'Model Optimization' + elif 'assign' in attr_name or 'provision' in attr_name or ('user' in attr_name and 'workspace' in attr_name) or attr_name in ['set_workspace_default_storage_format']: + category = 'Admin' + elif 'pool' in attr_name or 'spark' in attr_name: + category = 'Spark' + functions[attr_name] = category + +functions = dict(sorted(functions.items(), key=lambda item: 
(item[1], item[0]))) + +category_to_funcs = defaultdict(list) +tom_cat = 'Tabular Object Model (TOM)' +category_to_funcs[tom_cat] = defaultdict(list) +for func, category in functions.items(): + if 'TOM' not in category: + category_to_funcs[category].append(func) + else: + if 'vertipaq' in func or func in ['row_count', 'used_size', 'data_size', 'dictionary_size', 'total_size', 'cardinality', 'records_per_segment']: + category_to_funcs[tom_cat]["Vertipaq Stats"].append(func) + elif 'policy' in func or 'incremental' in func: + category_to_funcs[tom_cat]["Incremental Refresh"].append(func) + elif 'annotation' in func: + category_to_funcs[tom_cat]["Annotations"].append(func) + elif 'extended_prop' in func: + category_to_funcs[tom_cat]["Extended Properties"].append(func) + elif 'perspective' in func: + category_to_funcs[tom_cat]["Perspectives"].append(func) + elif 'translat' in func: + category_to_funcs[tom_cat]["Translations"].append(func) + elif func.startswith('all_'): + category_to_funcs[tom_cat]["'All' functions"].append(func) + elif func.startswith('add_'): + category_to_funcs[tom_cat]["'Add' functions"].append(func) + elif func.startswith('set_'): + category_to_funcs[tom_cat]["'Set' functions"].append(func) + elif func.startswith('update_'): + category_to_funcs[tom_cat]["'Update' functions"].append(func) + elif func.startswith('remove_'): + category_to_funcs[tom_cat]["'Remove' functions"].append(func) + elif func.startswith('used_in_') or func in ['depends_on', 'referenced_by', 'fully_qualified_measures', 'unqualified_columns']: + category_to_funcs[tom_cat]["'Used-in' and dependency functions"].append(func) + else: + category_to_funcs[tom_cat]["Misc functions"].append(func) + +sorted_category_to_funcs = {} +for category in sorted(category_to_funcs.keys()): + if isinstance(category_to_funcs[category], defaultdict): + sorted_subcategories = {} + for subcategory in sorted(category_to_funcs[category].keys()): + sorted_subcategories[subcategory] = 
sorted(category_to_funcs[category][subcategory]) + sorted_category_to_funcs[category] = sorted_subcategories + else: + sorted_category_to_funcs[category] = sorted(category_to_funcs[category]) + +markdown_content += '## Function Categories\n' +for category, funcs in sorted_category_to_funcs.items(): + if 'TOM' in category: + markdown_content += "\n### [Tabular Object Model](https://learn.microsoft.com/analysis-services/tom/introduction-to-the-tabular-object-model-tom-in-analysis-services-amo?view=asallproducts-allversions) ([TOM](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.model?view=analysisservices-dotnet))" + for sub_category, tom_list in funcs.items(): + markdown_content += f"\n#### {sub_category}" + for tom_func in tom_list: + markdown_content += f"\n* [{tom_func}](https://github.com/microsoft/semantic-link-labs)" + markdown_content += '\n' + markdown_content += '\n' + else: + markdown_content += f"\n### {category}" + for func in funcs: + markdown_content += f"\n* [{func}](https://github.com/microsoft/semantic-link-labs)" + markdown_content += '\n' + +markdown_content += """ +--- +## Direct Lake migration + +The following process automates the migration of an import/DirectQuery model to a new [Direct Lake](https://learn.microsoft.com/power-bi/enterprise/directlake-overview) model. The first step is specifically applicable to models which use Power Query to perform data transformations. If your model does not use Power Query, you must migrate the base tables used in your semantic model to a Fabric lakehouse. + +Check out [Nikola Ilic](https://twitter.com/DataMozart)'s terrific [blog post](https://data-mozart.com/migrate-existing-power-bi-semantic-models-to-direct-lake-a-step-by-step-guide/) on this topic! + +Check out my [blog post](https://www.elegantbi.com/post/direct-lake-migration) on this topic! 
+ +[![Direct Lake Migration Video](https://img.youtube.com/vi/gGIxMrTVyyI/0.jpg)](https://www.youtube.com/watch?v=gGIxMrTVyyI?t=495) + +### Prerequisites + +* Make sure you [enable XMLA Read/Write](https://learn.microsoft.com/power-bi/enterprise/service-premium-connect-tools#enable-xmla-read-write) for your capacity +* Make sure you have a [lakehouse](https://learn.microsoft.com/fabric/onelake/create-lakehouse-onelake#create-a-lakehouse) in a Fabric workspace +* Enable the following [setting](https://learn.microsoft.com/power-bi/transform-model/service-edit-data-models#enable-the-preview-feature): Workspace -> Workspace Settings -> General -> Data model settings -> Users can edit data models in the Power BI service + +### Instructions + +1. Download this [notebook](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Migration%20to%20Direct%20Lake.ipynb). +2. Make sure you are in the ['Data Engineering' persona](https://learn.microsoft.com/fabric/get-started/microsoft-fabric-overview#components-of-microsoft-fabric). Click the icon at the bottom left corner of your Workspace screen and select 'Data Engineering' +3. In your workspace, select 'New -> Import notebook' and import the notebook from step 1. +4. [Add your lakehouse](https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse) to your Fabric notebook +5. Follow the instructions within the notebook. + +### The migration process + +> [!NOTE] +> The first 4 steps are only necessary if you have logic in Power Query. Otherwise, you will need to migrate your semantic model source tables to lakehouse tables. + +1. The first step of the notebook creates a Power Query Template (.pqt) file which eases the migration of Power Query logic to Dataflows Gen2. +2. After the .pqt file is created, sync files from your [OneLake file explorer](https://www.microsoft.com/download/details.aspx?id=105222) +3. Navigate to your lakehouse (this is critical!). 
From your lakehouse, create a new Dataflows Gen2, and import the Power Query Template file. Doing this step from your lakehouse will automatically set the destination for all tables to this lakehouse (instead of having to manually map each one). +4. Publish the Dataflow Gen2 and wait for it to finish creating the delta lake tables in your lakehouse. +5. Back in the notebook, the next step will create your new Direct Lake semantic model with the name of your choice, taking all the relevant properties from the orignal semantic model and refreshing/framing your new semantic model. + +> [!NOTE] +> As of version 0.2.1, calculated tables are also migrated to Direct Lake (as data tables with their DAX expression stored as model annotations in the new semantic model). Additionally, Field Parameters are migrated as they were in the original semantic model (as a calculated table). + +6. Finally, you can easily rebind your all reports which use the import/DQ semantic model to the new Direct Lake semantic model in one click. + +### Completing these steps will do the following: +* Offload your Power Query logic to Dataflows Gen2 inside of Fabric (where it can be maintained and development can continue). +* Dataflows Gen2 will create delta tables in your Fabric lakehouse. These tables can then be used for your Direct Lake model. +* Create a new semantic model in Direct Lake mode containing all the standard tables and columns, calculation groups, measures, relationships, hierarchies, roles, row level security, perspectives, and translations from your original semantic model. +* Viable calculated tables are migrated to the new semantic model as data tables. Delta tables are dynamically generated in the lakehouse to support the Direct Lake model. The calculated table DAX logic is stored as model annotations in the new semantic model. +* Field parameters are migrated to the new semantic model as they were in the original semantic model (as calculated tables). 
Any calculated columns used in field parameters are automatically removed in the new semantic model's field parameter(s). +* Non-supported objects are not transferred (i.e. calculated columns, relationships using columns with unsupported data types etc.). +* Reports used by your original semantic model will be rebinded to your new semantic model. + +## Contributing + +This project welcomes contributions and suggestions. Most contributions require you to agree to a +Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us +the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com. + +When you submit a pull request, a CLA bot will automatically determine whether you need to provide +a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions +provided by the bot. You will only need to do this once across all repos using our CLA. + +This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). +For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or +contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. + +## Trademarks + +This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft +trademarks or logos is subject to and must follow +[Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general). +Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship. +Any use of third-party trademarks or logos are subject to those third-party's policies. 
+""" + +output_path = os.path.join('/root/semantic-link-labs', 'README.md') +with open(output_path, 'w') as f: + f.write(markdown_content) diff --git a/README.md b/README.md index 98a55af5..01fa1e6f 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,4 @@ + # Semantic Link Labs [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs) @@ -51,6 +52,295 @@ An even better way to ensure the semantic-link-labs library is available in your 1. Navigate to your Notebook 2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook + + +## Function Categories + +### Admin +* [add_user_to_workspace](https://github.com/microsoft/semantic-link-labs) +* [assign_workspace_to_capacity](https://github.com/microsoft/semantic-link-labs) +* [assign_workspace_to_dataflow_storage](https://github.com/microsoft/semantic-link-labs) +* [delete_user_from_workspace](https://github.com/microsoft/semantic-link-labs) +* [deprovision_workspace_identity](https://github.com/microsoft/semantic-link-labs) +* [list_workspace_role_assignments](https://github.com/microsoft/semantic-link-labs) +* [list_workspace_users](https://github.com/microsoft/semantic-link-labs) +* [provision_workspace_identity](https://github.com/microsoft/semantic-link-labs) +* [set_workspace_default_storage_format](https://github.com/microsoft/semantic-link-labs) +* [unassign_workspace_from_capacity](https://github.com/microsoft/semantic-link-labs) +* [update_workspace_user](https://github.com/microsoft/semantic-link-labs) + +### Deployment Pipelines +* [list_deployment_pipeline_stage_items](https://github.com/microsoft/semantic-link-labs) +* [list_deployment_pipeline_stages](https://github.com/microsoft/semantic-link-labs) +* [list_deployment_pipelines](https://github.com/microsoft/semantic-link-labs) + +### Direct Lake +* [add_table_to_direct_lake_semantic_model](https://github.com/microsoft/semantic-link-labs) +* 
[check_fallback_reason](https://github.com/microsoft/semantic-link-labs) +* [direct_lake_schema_compare](https://github.com/microsoft/semantic-link-labs) +* [direct_lake_schema_sync](https://github.com/microsoft/semantic-link-labs) +* [generate_direct_lake_semantic_model](https://github.com/microsoft/semantic-link-labs) +* [get_direct_lake_guardrails](https://github.com/microsoft/semantic-link-labs) +* [get_direct_lake_lakehouse](https://github.com/microsoft/semantic-link-labs) +* [get_direct_lake_source](https://github.com/microsoft/semantic-link-labs) +* [get_direct_lake_sql_endpoint](https://github.com/microsoft/semantic-link-labs) +* [get_directlake_guardrails_for_sku](https://github.com/microsoft/semantic-link-labs) +* [get_shared_expression](https://github.com/microsoft/semantic-link-labs) +* [get_sku_size](https://github.com/microsoft/semantic-link-labs) +* [list_direct_lake_model_calc_tables](https://github.com/microsoft/semantic-link-labs) +* [show_unsupported_direct_lake_objects](https://github.com/microsoft/semantic-link-labs) +* [update_direct_lake_model_lakehouse_connection](https://github.com/microsoft/semantic-link-labs) +* [update_direct_lake_partition_entity](https://github.com/microsoft/semantic-link-labs) +* [warm_direct_lake_cache_isresident](https://github.com/microsoft/semantic-link-labs) +* [warm_direct_lake_cache_perspective](https://github.com/microsoft/semantic-link-labs) + +### Direct Lake Migration +* [create_pqt_file](https://github.com/microsoft/semantic-link-labs) +* [migrate_calc_tables_to_lakehouse](https://github.com/microsoft/semantic-link-labs) +* [migrate_calc_tables_to_semantic_model](https://github.com/microsoft/semantic-link-labs) +* [migrate_field_parameters](https://github.com/microsoft/semantic-link-labs) +* [migrate_model_objects_to_semantic_model](https://github.com/microsoft/semantic-link-labs) +* [migrate_tables_columns_to_semantic_model](https://github.com/microsoft/semantic-link-labs) +* 
[migration_validation](https://github.com/microsoft/semantic-link-labs) +* [refresh_calc_tables](https://github.com/microsoft/semantic-link-labs) + +### General +* [create_warehouse](https://github.com/microsoft/semantic-link-labs) +* [get_notebook_definition](https://github.com/microsoft/semantic-link-labs) +* [import_notebook_from_web](https://github.com/microsoft/semantic-link-labs) +* [list_capacities](https://github.com/microsoft/semantic-link-labs) +* [list_dashboards](https://github.com/microsoft/semantic-link-labs) +* [list_dataflow_storage_accounts](https://github.com/microsoft/semantic-link-labs) +* [list_dataflows](https://github.com/microsoft/semantic-link-labs) +* [list_warehouses](https://github.com/microsoft/semantic-link-labs) +* [update_item](https://github.com/microsoft/semantic-link-labs) + +### Git +* [commit_to_git](https://github.com/microsoft/semantic-link-labs) +* [connect_workspace_to_git](https://github.com/microsoft/semantic-link-labs) +* [disconnect_workspace_from_git](https://github.com/microsoft/semantic-link-labs) +* [get_git_connection](https://github.com/microsoft/semantic-link-labs) +* [get_git_status](https://github.com/microsoft/semantic-link-labs) +* [initialize_git_connection](https://github.com/microsoft/semantic-link-labs) +* [update_from_git](https://github.com/microsoft/semantic-link-labs) + +### Helper Functions +* [create_abfss_path](https://github.com/microsoft/semantic-link-labs) +* [create_relationship_name](https://github.com/microsoft/semantic-link-labs) +* [format_dax_object_name](https://github.com/microsoft/semantic-link-labs) +* [generate_embedded_filter](https://github.com/microsoft/semantic-link-labs) +* [get_capacity_id](https://github.com/microsoft/semantic-link-labs) +* [get_capacity_name](https://github.com/microsoft/semantic-link-labs) +* [resolve_capacity_name](https://github.com/microsoft/semantic-link-labs) +* [resolve_dataset_id](https://github.com/microsoft/semantic-link-labs) +* 
[resolve_dataset_name](https://github.com/microsoft/semantic-link-labs) +* [resolve_item_type](https://github.com/microsoft/semantic-link-labs) +* [resolve_report_id](https://github.com/microsoft/semantic-link-labs) +* [resolve_report_name](https://github.com/microsoft/semantic-link-labs) +* [resolve_workspace_capacity](https://github.com/microsoft/semantic-link-labs) +* [save_as_delta_table](https://github.com/microsoft/semantic-link-labs) + +### Lakehouse +* [create_shortcut_onelake](https://github.com/microsoft/semantic-link-labs) +* [delete_shortcut](https://github.com/microsoft/semantic-link-labs) +* [export_model_to_onelake](https://github.com/microsoft/semantic-link-labs) +* [get_lakehouse_columns](https://github.com/microsoft/semantic-link-labs) +* [get_lakehouse_tables](https://github.com/microsoft/semantic-link-labs) +* [lakehouse_attached](https://github.com/microsoft/semantic-link-labs) +* [list_lakehouses](https://github.com/microsoft/semantic-link-labs) +* [list_shortcuts](https://github.com/microsoft/semantic-link-labs) +* [optimize_lakehouse_tables](https://github.com/microsoft/semantic-link-labs) +* [resolve_lakehouse_id](https://github.com/microsoft/semantic-link-labs) +* [resolve_lakehouse_name](https://github.com/microsoft/semantic-link-labs) +* [vacuum_lakehouse_tables](https://github.com/microsoft/semantic-link-labs) + +### Model Optimization +* [import_vertipaq_analyzer](https://github.com/microsoft/semantic-link-labs) +* [model_bpa_rules](https://github.com/microsoft/semantic-link-labs) +* [run_model_bpa](https://github.com/microsoft/semantic-link-labs) +* [run_model_bpa_bulk](https://github.com/microsoft/semantic-link-labs) +* [vertipaq_analyzer](https://github.com/microsoft/semantic-link-labs) + +### Query Scale Out +* [disable_qso](https://github.com/microsoft/semantic-link-labs) +* [list_qso_settings](https://github.com/microsoft/semantic-link-labs) +* [qso_sync](https://github.com/microsoft/semantic-link-labs) +* 
[qso_sync_status](https://github.com/microsoft/semantic-link-labs) +* [set_qso](https://github.com/microsoft/semantic-link-labs) + +### Report +* [clone_report](https://github.com/microsoft/semantic-link-labs) +* [create_model_bpa_report](https://github.com/microsoft/semantic-link-labs) +* [create_report_from_reportjson](https://github.com/microsoft/semantic-link-labs) +* [export_report](https://github.com/microsoft/semantic-link-labs) +* [get_report_definition](https://github.com/microsoft/semantic-link-labs) +* [get_report_json](https://github.com/microsoft/semantic-link-labs) +* [launch_report](https://github.com/microsoft/semantic-link-labs) +* [report_rebind](https://github.com/microsoft/semantic-link-labs) +* [report_rebind_all](https://github.com/microsoft/semantic-link-labs) +* [update_report_from_reportjson](https://github.com/microsoft/semantic-link-labs) + +### Semantic Model +* [backup_semantic_model](https://github.com/microsoft/semantic-link-labs) +* [cancel_dataset_refresh](https://github.com/microsoft/semantic-link-labs) +* [clear_cache](https://github.com/microsoft/semantic-link-labs) +* [copy_semantic_model_backup_file](https://github.com/microsoft/semantic-link-labs) +* [create_blank_semantic_model](https://github.com/microsoft/semantic-link-labs) +* [create_model_bpa_semantic_model](https://github.com/microsoft/semantic-link-labs) +* [create_semantic_model_from_bim](https://github.com/microsoft/semantic-link-labs) +* [deploy_semantic_model](https://github.com/microsoft/semantic-link-labs) +* [evaluate_dax_impersonation](https://github.com/microsoft/semantic-link-labs) +* [get_measure_dependencies](https://github.com/microsoft/semantic-link-labs) +* [get_model_calc_dependencies](https://github.com/microsoft/semantic-link-labs) +* [get_object_level_security](https://github.com/microsoft/semantic-link-labs) +* [get_semantic_model_bim](https://github.com/microsoft/semantic-link-labs) +* 
[is_default_semantic_model](https://github.com/microsoft/semantic-link-labs) +* [list_reports_using_semantic_model](https://github.com/microsoft/semantic-link-labs) +* [list_semantic_model_objects](https://github.com/microsoft/semantic-link-labs) +* [measure_dependency_tree](https://github.com/microsoft/semantic-link-labs) +* [refresh_semantic_model](https://github.com/microsoft/semantic-link-labs) +* [restore_semantic_model](https://github.com/microsoft/semantic-link-labs) +* [set_semantic_model_storage_format](https://github.com/microsoft/semantic-link-labs) +* [translate_semantic_model](https://github.com/microsoft/semantic-link-labs) + +### Spark +* [create_custom_pool](https://github.com/microsoft/semantic-link-labs) +* [delete_custom_pool](https://github.com/microsoft/semantic-link-labs) +* [get_spark_settings](https://github.com/microsoft/semantic-link-labs) +* [list_custom_pools](https://github.com/microsoft/semantic-link-labs) +* [update_custom_pool](https://github.com/microsoft/semantic-link-labs) +* [update_spark_settings](https://github.com/microsoft/semantic-link-labs) + +### [Tabular Object Model](https://learn.microsoft.com/analysis-services/tom/introduction-to-the-tabular-object-model-tom-in-analysis-services-amo?view=asallproducts-allversions) ([TOM](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.model?view=analysisservices-dotnet)) +#### 'Add' functions +* [add_calculated_column](https://github.com/microsoft/semantic-link-labs) +* [add_calculated_table](https://github.com/microsoft/semantic-link-labs) +* [add_calculated_table_column](https://github.com/microsoft/semantic-link-labs) +* [add_calculation_group](https://github.com/microsoft/semantic-link-labs) +* [add_calculation_item](https://github.com/microsoft/semantic-link-labs) +* [add_data_column](https://github.com/microsoft/semantic-link-labs) +* [add_entity_partition](https://github.com/microsoft/semantic-link-labs) +* 
[add_expression](https://github.com/microsoft/semantic-link-labs) +* [add_field_parameter](https://github.com/microsoft/semantic-link-labs) +* [add_hierarchy](https://github.com/microsoft/semantic-link-labs) +* [add_m_partition](https://github.com/microsoft/semantic-link-labs) +* [add_measure](https://github.com/microsoft/semantic-link-labs) +* [add_relationship](https://github.com/microsoft/semantic-link-labs) +* [add_role](https://github.com/microsoft/semantic-link-labs) +* [add_table](https://github.com/microsoft/semantic-link-labs) +* [add_time_intelligence](https://github.com/microsoft/semantic-link-labs) + +#### 'All' functions +* [all_calculated_columns](https://github.com/microsoft/semantic-link-labs) +* [all_calculated_tables](https://github.com/microsoft/semantic-link-labs) +* [all_calculation_groups](https://github.com/microsoft/semantic-link-labs) +* [all_calculation_items](https://github.com/microsoft/semantic-link-labs) +* [all_columns](https://github.com/microsoft/semantic-link-labs) +* [all_date_tables](https://github.com/microsoft/semantic-link-labs) +* [all_hierarchies](https://github.com/microsoft/semantic-link-labs) +* [all_hybrid_tables](https://github.com/microsoft/semantic-link-labs) +* [all_levels](https://github.com/microsoft/semantic-link-labs) +* [all_measures](https://github.com/microsoft/semantic-link-labs) +* [all_partitions](https://github.com/microsoft/semantic-link-labs) +* [all_rls](https://github.com/microsoft/semantic-link-labs) + +#### 'Remove' functions +* [remove_alternate_of](https://github.com/microsoft/semantic-link-labs) +* [remove_object](https://github.com/microsoft/semantic-link-labs) +* [remove_sort_by_column](https://github.com/microsoft/semantic-link-labs) + +#### 'Set' functions +* [set_aggregations](https://github.com/microsoft/semantic-link-labs) +* [set_alternate_of](https://github.com/microsoft/semantic-link-labs) +* [set_data_coverage_definition](https://github.com/microsoft/semantic-link-labs) +* 
[set_data_type](https://github.com/microsoft/semantic-link-labs) +* [set_direct_lake_behavior](https://github.com/microsoft/semantic-link-labs) +* [set_encoding_hint](https://github.com/microsoft/semantic-link-labs) +* [set_is_available_in_mdx](https://github.com/microsoft/semantic-link-labs) +* [set_kpi](https://github.com/microsoft/semantic-link-labs) +* [set_ols](https://github.com/microsoft/semantic-link-labs) +* [set_rls](https://github.com/microsoft/semantic-link-labs) +* [set_sort_by_column](https://github.com/microsoft/semantic-link-labs) +* [set_summarize_by](https://github.com/microsoft/semantic-link-labs) + +#### 'Update' functions +* [update_calculation_item](https://github.com/microsoft/semantic-link-labs) +* [update_column](https://github.com/microsoft/semantic-link-labs) +* [update_m_partition](https://github.com/microsoft/semantic-link-labs) +* [update_measure](https://github.com/microsoft/semantic-link-labs) +* [update_role](https://github.com/microsoft/semantic-link-labs) + +#### 'Used-in' and dependency functions +* [depends_on](https://github.com/microsoft/semantic-link-labs) +* [fully_qualified_measures](https://github.com/microsoft/semantic-link-labs) +* [referenced_by](https://github.com/microsoft/semantic-link-labs) +* [unqualified_columns](https://github.com/microsoft/semantic-link-labs) +* [used_in_calc_item](https://github.com/microsoft/semantic-link-labs) +* [used_in_data_coverage_definition](https://github.com/microsoft/semantic-link-labs) +* [used_in_hierarchies](https://github.com/microsoft/semantic-link-labs) +* [used_in_levels](https://github.com/microsoft/semantic-link-labs) +* [used_in_relationships](https://github.com/microsoft/semantic-link-labs) +* [used_in_rls](https://github.com/microsoft/semantic-link-labs) +* [used_in_sort_by](https://github.com/microsoft/semantic-link-labs) + +#### Annotations +* [clear_annotations](https://github.com/microsoft/semantic-link-labs) +* 
[get_annotation_value](https://github.com/microsoft/semantic-link-labs) +* [get_annotations](https://github.com/microsoft/semantic-link-labs) +* [remove_annotation](https://github.com/microsoft/semantic-link-labs) +* [set_annotation](https://github.com/microsoft/semantic-link-labs) + +#### Extended Properties +* [clear_extended_properties](https://github.com/microsoft/semantic-link-labs) +* [get_extended_properties](https://github.com/microsoft/semantic-link-labs) +* [get_extended_property_value](https://github.com/microsoft/semantic-link-labs) +* [remove_extended_property](https://github.com/microsoft/semantic-link-labs) +* [set_extended_property](https://github.com/microsoft/semantic-link-labs) + +#### Incremental Refresh +* [add_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs) +* [apply_refresh_policy](https://github.com/microsoft/semantic-link-labs) +* [has_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs) +* [show_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs) +* [update_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs) + +#### Misc functions +* [has_aggs](https://github.com/microsoft/semantic-link-labs) +* [has_date_table](https://github.com/microsoft/semantic-link-labs) +* [has_hybrid_table](https://github.com/microsoft/semantic-link-labs) +* [is_agg_table](https://github.com/microsoft/semantic-link-labs) +* [is_auto_date_table](https://github.com/microsoft/semantic-link-labs) +* [is_calculated_table](https://github.com/microsoft/semantic-link-labs) +* [is_date_table](https://github.com/microsoft/semantic-link-labs) +* [is_direct_lake](https://github.com/microsoft/semantic-link-labs) +* [is_direct_lake_using_view](https://github.com/microsoft/semantic-link-labs) +* [is_field_parameter](https://github.com/microsoft/semantic-link-labs) +* [is_hybrid_table](https://github.com/microsoft/semantic-link-labs) +* 
[mark_as_date_table](https://github.com/microsoft/semantic-link-labs) + +#### Perspectives +* [add_perspective](https://github.com/microsoft/semantic-link-labs) +* [add_to_perspective](https://github.com/microsoft/semantic-link-labs) +* [in_perspective](https://github.com/microsoft/semantic-link-labs) +* [remove_from_perspective](https://github.com/microsoft/semantic-link-labs) + +#### Translations +* [add_translation](https://github.com/microsoft/semantic-link-labs) +* [remove_translation](https://github.com/microsoft/semantic-link-labs) +* [set_translation](https://github.com/microsoft/semantic-link-labs) + +#### Vertipaq Stats +* [cardinality](https://github.com/microsoft/semantic-link-labs) +* [data_size](https://github.com/microsoft/semantic-link-labs) +* [dictionary_size](https://github.com/microsoft/semantic-link-labs) +* [records_per_segment](https://github.com/microsoft/semantic-link-labs) +* [remove_vertipaq_annotations](https://github.com/microsoft/semantic-link-labs) +* [row_count](https://github.com/microsoft/semantic-link-labs) +* [set_vertipaq_annotations](https://github.com/microsoft/semantic-link-labs) +* [total_size](https://github.com/microsoft/semantic-link-labs) +* [used_size](https://github.com/microsoft/semantic-link-labs) + + --- ## Direct Lake migration diff --git a/src/sempy_labs/_model_bpa.py b/src/sempy_labs/_model_bpa.py index 668380f1..1d239cdc 100644 --- a/src/sempy_labs/_model_bpa.py +++ b/src/sempy_labs/_model_bpa.py @@ -21,7 +21,6 @@ import sempy_labs._icons as icons from pyspark.sql.functions import col, flatten from pyspark.sql.types import StructType, StructField, StringType -import polib import os @@ -66,6 +65,7 @@ def run_model_bpa( """ from synapse.ml.services import Translate + import polib if "extend" in kwargs: print( From ce36b69ed00841016030a959aa2b88959c3302d2 Mon Sep 17 00:00:00 2001 From: Michael Date: Wed, 4 Sep 2024 23:50:45 +0300 Subject: [PATCH 02/18] added function_examples.md --- 
.github/workflows/_generate_func_examples.py | 66 + .github/workflows/_generate_readme.py | 11 +- README.md | 462 ++-- function_examples.md | 2522 ++++++++++++++++++ 4 files changed, 2826 insertions(+), 235 deletions(-) create mode 100644 .github/workflows/_generate_func_examples.py create mode 100644 function_examples.md diff --git a/.github/workflows/_generate_func_examples.py b/.github/workflows/_generate_func_examples.py new file mode 100644 index 00000000..91fffab5 --- /dev/null +++ b/.github/workflows/_generate_func_examples.py @@ -0,0 +1,66 @@ +import inspect +import os +import typing +import sempy_labs +import sempy_labs.migration +import sempy_labs.report +import sempy_labs.directlake +from sempy_labs.tom import TOMWrapper +import sempy_labs.lakehouse + +dirs = { + sempy_labs: 'labs', + sempy_labs.directlake: 'directlake', + sempy_labs.lakehouse: 'lake', + sempy_labs.migration: 'migration', + sempy_labs.report: 'rep', + TOMWrapper: 'tom', +} + +link_prefix = "https://semantic-link-labs.readthedocs.io/en/stable/" +tab = ' ' +skip_functions = ['connect_semantic_model', '__init__', 'close'] + +markdown_example = '## Function Examples\n' +# Function Examples +for d, d_alias in dirs.items(): + d_name = d.__name__ + for attr_name in dir(d): + attr = getattr(d, attr_name) + if inspect.isfunction(attr): + if attr_name not in skip_functions: + link = f"{link_prefix}{d_name}.html#{d_name}.{attr_name}" + if d_alias == 'tom': + link = f"{link_prefix}sempy_labs.{d_alias}.html#sempy_labs.{d_alias}.{d_name}.{attr_name}" + sig = inspect.signature(attr) + markdown_example += f"\n### [{attr_name}]({link})\n```python" + markdown_example += "\nimport sempy_labs as labs" + if d_alias == 'tom': + markdown_example += "\nfrom sempy_labs.tom import connect_semantic_model" + tf = 'True' + markdown_example += f"\nwith connect_semantic_model(dataset='', workspace='', readonly={tf}) as tom:" + elif d_alias != 'labs': + markdown_example += f"\nimport {d_name} as {d_alias}" + func_print 
= f"{d_alias}.{attr_name}(" + if d_alias == 'tom': + markdown_example += f"\n{tab}{func_print}" + else: + markdown_example += f"\n{func_print}" + for param_name, param in sig.parameters.items(): + if param_name not in ['kwargs', 'self']: + param_value = "''" + param_type = param.annotation if param.annotation != inspect._empty else "Unknown" + if typing.get_origin(param_type) is typing.Union: + args = typing.get_args(param_type) + if type(None) in args: + param_value = 'None' + p = f"{tab}{param_name}={param_value}," + if d_alias == 'tom': + markdown_example += f"\n{tab}{p}" + else: + markdown_example += f"\n{p}" + markdown_example += '\n)\n```\n' + +output_path = os.path.join('/root/semantic-link-labs', 'function_examples.md') +with open(output_path, 'w') as f: + f.write(markdown_example) diff --git a/.github/workflows/_generate_readme.py b/.github/workflows/_generate_readme.py index 7965a2d7..7648db16 100644 --- a/.github/workflows/_generate_readme.py +++ b/.github/workflows/_generate_readme.py @@ -85,6 +85,7 @@ attr = getattr(d, attr_name) if inspect.isfunction(attr): if attr_name not in skip_functions: + sig = inspect.signature(attr) category = 'General' if d_alias == 'migration': category = 'Direct Lake Migration' @@ -120,11 +121,11 @@ tom_cat = 'Tabular Object Model (TOM)' category_to_funcs[tom_cat] = defaultdict(list) for func, category in functions.items(): - if 'TOM' not in category: + if 'TOM' not in category: category_to_funcs[category].append(func) else: if 'vertipaq' in func or func in ['row_count', 'used_size', 'data_size', 'dictionary_size', 'total_size', 'cardinality', 'records_per_segment']: - category_to_funcs[tom_cat]["Vertipaq Stats"].append(func) + category_to_funcs[tom_cat]["Vertipaq Stats"].append(func) elif 'policy' in func or 'incremental' in func: category_to_funcs[tom_cat]["Incremental Refresh"].append(func) elif 'annotation' in func: @@ -160,6 +161,8 @@ else: sorted_category_to_funcs[category] = sorted(category_to_funcs[category]) 
+prefix = "https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#" + markdown_content += '## Function Categories\n' for category, funcs in sorted_category_to_funcs.items(): if 'TOM' in category: @@ -167,13 +170,13 @@ for sub_category, tom_list in funcs.items(): markdown_content += f"\n#### {sub_category}" for tom_func in tom_list: - markdown_content += f"\n* [{tom_func}](https://github.com/microsoft/semantic-link-labs)" + markdown_content += f"\n* [{tom_func}]({prefix}{tom_func})" markdown_content += '\n' markdown_content += '\n' else: markdown_content += f"\n### {category}" for func in funcs: - markdown_content += f"\n* [{func}](https://github.com/microsoft/semantic-link-labs)" + markdown_content += f"\n* [{func}]({prefix}{func})" markdown_content += '\n' markdown_content += """ diff --git a/README.md b/README.md index 01fa1e6f..21fcf1c7 100644 --- a/README.md +++ b/README.md @@ -57,288 +57,288 @@ An even better way to ensure the semantic-link-labs library is available in your ## Function Categories ### Admin -* [add_user_to_workspace](https://github.com/microsoft/semantic-link-labs) -* [assign_workspace_to_capacity](https://github.com/microsoft/semantic-link-labs) -* [assign_workspace_to_dataflow_storage](https://github.com/microsoft/semantic-link-labs) -* [delete_user_from_workspace](https://github.com/microsoft/semantic-link-labs) -* [deprovision_workspace_identity](https://github.com/microsoft/semantic-link-labs) -* [list_workspace_role_assignments](https://github.com/microsoft/semantic-link-labs) -* [list_workspace_users](https://github.com/microsoft/semantic-link-labs) -* [provision_workspace_identity](https://github.com/microsoft/semantic-link-labs) -* [set_workspace_default_storage_format](https://github.com/microsoft/semantic-link-labs) -* [unassign_workspace_from_capacity](https://github.com/microsoft/semantic-link-labs) -* [update_workspace_user](https://github.com/microsoft/semantic-link-labs) +* 
[add_user_to_workspace](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_user_to_workspace) +* [assign_workspace_to_capacity](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#assign_workspace_to_capacity) +* [assign_workspace_to_dataflow_storage](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#assign_workspace_to_dataflow_storage) +* [delete_user_from_workspace](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#delete_user_from_workspace) +* [deprovision_workspace_identity](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#deprovision_workspace_identity) +* [list_workspace_role_assignments](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_workspace_role_assignments) +* [list_workspace_users](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_workspace_users) +* [provision_workspace_identity](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#provision_workspace_identity) +* [set_workspace_default_storage_format](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_workspace_default_storage_format) +* [unassign_workspace_from_capacity](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#unassign_workspace_from_capacity) +* [update_workspace_user](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_workspace_user) ### Deployment Pipelines -* [list_deployment_pipeline_stage_items](https://github.com/microsoft/semantic-link-labs) -* [list_deployment_pipeline_stages](https://github.com/microsoft/semantic-link-labs) -* [list_deployment_pipelines](https://github.com/microsoft/semantic-link-labs) +* 
[list_deployment_pipeline_stage_items](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_deployment_pipeline_stage_items) +* [list_deployment_pipeline_stages](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_deployment_pipeline_stages) +* [list_deployment_pipelines](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_deployment_pipelines) ### Direct Lake -* [add_table_to_direct_lake_semantic_model](https://github.com/microsoft/semantic-link-labs) -* [check_fallback_reason](https://github.com/microsoft/semantic-link-labs) -* [direct_lake_schema_compare](https://github.com/microsoft/semantic-link-labs) -* [direct_lake_schema_sync](https://github.com/microsoft/semantic-link-labs) -* [generate_direct_lake_semantic_model](https://github.com/microsoft/semantic-link-labs) -* [get_direct_lake_guardrails](https://github.com/microsoft/semantic-link-labs) -* [get_direct_lake_lakehouse](https://github.com/microsoft/semantic-link-labs) -* [get_direct_lake_source](https://github.com/microsoft/semantic-link-labs) -* [get_direct_lake_sql_endpoint](https://github.com/microsoft/semantic-link-labs) -* [get_directlake_guardrails_for_sku](https://github.com/microsoft/semantic-link-labs) -* [get_shared_expression](https://github.com/microsoft/semantic-link-labs) -* [get_sku_size](https://github.com/microsoft/semantic-link-labs) -* [list_direct_lake_model_calc_tables](https://github.com/microsoft/semantic-link-labs) -* [show_unsupported_direct_lake_objects](https://github.com/microsoft/semantic-link-labs) -* [update_direct_lake_model_lakehouse_connection](https://github.com/microsoft/semantic-link-labs) -* [update_direct_lake_partition_entity](https://github.com/microsoft/semantic-link-labs) -* [warm_direct_lake_cache_isresident](https://github.com/microsoft/semantic-link-labs) -* [warm_direct_lake_cache_perspective](https://github.com/microsoft/semantic-link-labs) +* 
[add_table_to_direct_lake_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_table_to_direct_lake_semantic_model) +* [check_fallback_reason](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#check_fallback_reason) +* [direct_lake_schema_compare](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#direct_lake_schema_compare) +* [direct_lake_schema_sync](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#direct_lake_schema_sync) +* [generate_direct_lake_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#generate_direct_lake_semantic_model) +* [get_direct_lake_guardrails](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_direct_lake_guardrails) +* [get_direct_lake_lakehouse](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_direct_lake_lakehouse) +* [get_direct_lake_source](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_direct_lake_source) +* [get_direct_lake_sql_endpoint](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_direct_lake_sql_endpoint) +* [get_directlake_guardrails_for_sku](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_directlake_guardrails_for_sku) +* [get_shared_expression](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_shared_expression) +* [get_sku_size](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_sku_size) +* [list_direct_lake_model_calc_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_direct_lake_model_calc_tables) +* [show_unsupported_direct_lake_objects](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#show_unsupported_direct_lake_objects) +* 
[update_direct_lake_model_lakehouse_connection](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_direct_lake_model_lakehouse_connection) +* [update_direct_lake_partition_entity](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_direct_lake_partition_entity) +* [warm_direct_lake_cache_isresident](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#warm_direct_lake_cache_isresident) +* [warm_direct_lake_cache_perspective](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#warm_direct_lake_cache_perspective) ### Direct Lake Migration -* [create_pqt_file](https://github.com/microsoft/semantic-link-labs) -* [migrate_calc_tables_to_lakehouse](https://github.com/microsoft/semantic-link-labs) -* [migrate_calc_tables_to_semantic_model](https://github.com/microsoft/semantic-link-labs) -* [migrate_field_parameters](https://github.com/microsoft/semantic-link-labs) -* [migrate_model_objects_to_semantic_model](https://github.com/microsoft/semantic-link-labs) -* [migrate_tables_columns_to_semantic_model](https://github.com/microsoft/semantic-link-labs) -* [migration_validation](https://github.com/microsoft/semantic-link-labs) -* [refresh_calc_tables](https://github.com/microsoft/semantic-link-labs) +* [create_pqt_file](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_pqt_file) +* [migrate_calc_tables_to_lakehouse](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#migrate_calc_tables_to_lakehouse) +* [migrate_calc_tables_to_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#migrate_calc_tables_to_semantic_model) +* [migrate_field_parameters](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#migrate_field_parameters) +* 
[migrate_model_objects_to_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#migrate_model_objects_to_semantic_model) +* [migrate_tables_columns_to_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#migrate_tables_columns_to_semantic_model) +* [migration_validation](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#migration_validation) +* [refresh_calc_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#refresh_calc_tables) ### General -* [create_warehouse](https://github.com/microsoft/semantic-link-labs) -* [get_notebook_definition](https://github.com/microsoft/semantic-link-labs) -* [import_notebook_from_web](https://github.com/microsoft/semantic-link-labs) -* [list_capacities](https://github.com/microsoft/semantic-link-labs) -* [list_dashboards](https://github.com/microsoft/semantic-link-labs) -* [list_dataflow_storage_accounts](https://github.com/microsoft/semantic-link-labs) -* [list_dataflows](https://github.com/microsoft/semantic-link-labs) -* [list_warehouses](https://github.com/microsoft/semantic-link-labs) -* [update_item](https://github.com/microsoft/semantic-link-labs) +* [create_warehouse](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_warehouse) +* [get_notebook_definition](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_notebook_definition) +* [import_notebook_from_web](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#import_notebook_from_web) +* [list_capacities](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_capacities) +* [list_dashboards](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_dashboards) +* 
[list_dataflow_storage_accounts](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_dataflow_storage_accounts) +* [list_dataflows](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_dataflows) +* [list_warehouses](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_warehouses) +* [update_item](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_item) ### Git -* [commit_to_git](https://github.com/microsoft/semantic-link-labs) -* [connect_workspace_to_git](https://github.com/microsoft/semantic-link-labs) -* [disconnect_workspace_from_git](https://github.com/microsoft/semantic-link-labs) -* [get_git_connection](https://github.com/microsoft/semantic-link-labs) -* [get_git_status](https://github.com/microsoft/semantic-link-labs) -* [initialize_git_connection](https://github.com/microsoft/semantic-link-labs) -* [update_from_git](https://github.com/microsoft/semantic-link-labs) +* [commit_to_git](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#commit_to_git) +* [connect_workspace_to_git](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#connect_workspace_to_git) +* [disconnect_workspace_from_git](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#disconnect_workspace_from_git) +* [get_git_connection](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_git_connection) +* [get_git_status](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_git_status) +* [initialize_git_connection](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#initialize_git_connection) +* [update_from_git](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_from_git) ### Helper Functions -* [create_abfss_path](https://github.com/microsoft/semantic-link-labs) -* 
[create_relationship_name](https://github.com/microsoft/semantic-link-labs) -* [format_dax_object_name](https://github.com/microsoft/semantic-link-labs) -* [generate_embedded_filter](https://github.com/microsoft/semantic-link-labs) -* [get_capacity_id](https://github.com/microsoft/semantic-link-labs) -* [get_capacity_name](https://github.com/microsoft/semantic-link-labs) -* [resolve_capacity_name](https://github.com/microsoft/semantic-link-labs) -* [resolve_dataset_id](https://github.com/microsoft/semantic-link-labs) -* [resolve_dataset_name](https://github.com/microsoft/semantic-link-labs) -* [resolve_item_type](https://github.com/microsoft/semantic-link-labs) -* [resolve_report_id](https://github.com/microsoft/semantic-link-labs) -* [resolve_report_name](https://github.com/microsoft/semantic-link-labs) -* [resolve_workspace_capacity](https://github.com/microsoft/semantic-link-labs) -* [save_as_delta_table](https://github.com/microsoft/semantic-link-labs) +* [create_abfss_path](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_abfss_path) +* [create_relationship_name](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_relationship_name) +* [format_dax_object_name](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#format_dax_object_name) +* [generate_embedded_filter](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#generate_embedded_filter) +* [get_capacity_id](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_capacity_id) +* [get_capacity_name](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_capacity_name) +* [resolve_capacity_name](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_capacity_name) +* [resolve_dataset_id](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_dataset_id) +* 
[resolve_dataset_name](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_dataset_name) +* [resolve_item_type](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_item_type) +* [resolve_report_id](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_report_id) +* [resolve_report_name](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_report_name) +* [resolve_workspace_capacity](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_workspace_capacity) +* [save_as_delta_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#save_as_delta_table) ### Lakehouse -* [create_shortcut_onelake](https://github.com/microsoft/semantic-link-labs) -* [delete_shortcut](https://github.com/microsoft/semantic-link-labs) -* [export_model_to_onelake](https://github.com/microsoft/semantic-link-labs) -* [get_lakehouse_columns](https://github.com/microsoft/semantic-link-labs) -* [get_lakehouse_tables](https://github.com/microsoft/semantic-link-labs) -* [lakehouse_attached](https://github.com/microsoft/semantic-link-labs) -* [list_lakehouses](https://github.com/microsoft/semantic-link-labs) -* [list_shortcuts](https://github.com/microsoft/semantic-link-labs) -* [optimize_lakehouse_tables](https://github.com/microsoft/semantic-link-labs) -* [resolve_lakehouse_id](https://github.com/microsoft/semantic-link-labs) -* [resolve_lakehouse_name](https://github.com/microsoft/semantic-link-labs) -* [vacuum_lakehouse_tables](https://github.com/microsoft/semantic-link-labs) +* [create_shortcut_onelake](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_shortcut_onelake) +* [delete_shortcut](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#delete_shortcut) +* 
[export_model_to_onelake](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#export_model_to_onelake) +* [get_lakehouse_columns](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_lakehouse_columns) +* [get_lakehouse_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_lakehouse_tables) +* [lakehouse_attached](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#lakehouse_attached) +* [list_lakehouses](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_lakehouses) +* [list_shortcuts](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_shortcuts) +* [optimize_lakehouse_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#optimize_lakehouse_tables) +* [resolve_lakehouse_id](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_lakehouse_id) +* [resolve_lakehouse_name](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_lakehouse_name) +* [vacuum_lakehouse_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#vacuum_lakehouse_tables) ### Model Optimization -* [import_vertipaq_analyzer](https://github.com/microsoft/semantic-link-labs) -* [model_bpa_rules](https://github.com/microsoft/semantic-link-labs) -* [run_model_bpa](https://github.com/microsoft/semantic-link-labs) -* [run_model_bpa_bulk](https://github.com/microsoft/semantic-link-labs) -* [vertipaq_analyzer](https://github.com/microsoft/semantic-link-labs) +* [import_vertipaq_analyzer](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#import_vertipaq_analyzer) +* [model_bpa_rules](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#model_bpa_rules) +* 
[run_model_bpa](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#run_model_bpa) +* [run_model_bpa_bulk](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#run_model_bpa_bulk) +* [vertipaq_analyzer](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#vertipaq_analyzer) ### Query Scale Out -* [disable_qso](https://github.com/microsoft/semantic-link-labs) -* [list_qso_settings](https://github.com/microsoft/semantic-link-labs) -* [qso_sync](https://github.com/microsoft/semantic-link-labs) -* [qso_sync_status](https://github.com/microsoft/semantic-link-labs) -* [set_qso](https://github.com/microsoft/semantic-link-labs) +* [disable_qso](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#disable_qso) +* [list_qso_settings](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_qso_settings) +* [qso_sync](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#qso_sync) +* [qso_sync_status](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#qso_sync_status) +* [set_qso](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_qso) ### Report -* [clone_report](https://github.com/microsoft/semantic-link-labs) -* [create_model_bpa_report](https://github.com/microsoft/semantic-link-labs) -* [create_report_from_reportjson](https://github.com/microsoft/semantic-link-labs) -* [export_report](https://github.com/microsoft/semantic-link-labs) -* [get_report_definition](https://github.com/microsoft/semantic-link-labs) -* [get_report_json](https://github.com/microsoft/semantic-link-labs) -* [launch_report](https://github.com/microsoft/semantic-link-labs) -* [report_rebind](https://github.com/microsoft/semantic-link-labs) -* [report_rebind_all](https://github.com/microsoft/semantic-link-labs) -* 
[update_report_from_reportjson](https://github.com/microsoft/semantic-link-labs) +* [clone_report](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#clone_report) +* [create_model_bpa_report](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_model_bpa_report) +* [create_report_from_reportjson](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_report_from_reportjson) +* [export_report](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#export_report) +* [get_report_definition](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_report_definition) +* [get_report_json](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_report_json) +* [launch_report](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#launch_report) +* [report_rebind](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#report_rebind) +* [report_rebind_all](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#report_rebind_all) +* [update_report_from_reportjson](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_report_from_reportjson) ### Semantic Model -* [backup_semantic_model](https://github.com/microsoft/semantic-link-labs) -* [cancel_dataset_refresh](https://github.com/microsoft/semantic-link-labs) -* [clear_cache](https://github.com/microsoft/semantic-link-labs) -* [copy_semantic_model_backup_file](https://github.com/microsoft/semantic-link-labs) -* [create_blank_semantic_model](https://github.com/microsoft/semantic-link-labs) -* [create_model_bpa_semantic_model](https://github.com/microsoft/semantic-link-labs) -* [create_semantic_model_from_bim](https://github.com/microsoft/semantic-link-labs) -* [deploy_semantic_model](https://github.com/microsoft/semantic-link-labs) -* 
[evaluate_dax_impersonation](https://github.com/microsoft/semantic-link-labs) -* [get_measure_dependencies](https://github.com/microsoft/semantic-link-labs) -* [get_model_calc_dependencies](https://github.com/microsoft/semantic-link-labs) -* [get_object_level_security](https://github.com/microsoft/semantic-link-labs) -* [get_semantic_model_bim](https://github.com/microsoft/semantic-link-labs) -* [is_default_semantic_model](https://github.com/microsoft/semantic-link-labs) -* [list_reports_using_semantic_model](https://github.com/microsoft/semantic-link-labs) -* [list_semantic_model_objects](https://github.com/microsoft/semantic-link-labs) -* [measure_dependency_tree](https://github.com/microsoft/semantic-link-labs) -* [refresh_semantic_model](https://github.com/microsoft/semantic-link-labs) -* [restore_semantic_model](https://github.com/microsoft/semantic-link-labs) -* [set_semantic_model_storage_format](https://github.com/microsoft/semantic-link-labs) -* [translate_semantic_model](https://github.com/microsoft/semantic-link-labs) +* [backup_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#backup_semantic_model) +* [cancel_dataset_refresh](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#cancel_dataset_refresh) +* [clear_cache](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#clear_cache) +* [copy_semantic_model_backup_file](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#copy_semantic_model_backup_file) +* [create_blank_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_blank_semantic_model) +* [create_model_bpa_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_model_bpa_semantic_model) +* [create_semantic_model_from_bim](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_semantic_model_from_bim) 
+* [deploy_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#deploy_semantic_model) +* [evaluate_dax_impersonation](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#evaluate_dax_impersonation) +* [get_measure_dependencies](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_measure_dependencies) +* [get_model_calc_dependencies](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_model_calc_dependencies) +* [get_object_level_security](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_object_level_security) +* [get_semantic_model_bim](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_semantic_model_bim) +* [is_default_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_default_semantic_model) +* [list_reports_using_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_reports_using_semantic_model) +* [list_semantic_model_objects](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_semantic_model_objects) +* [measure_dependency_tree](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#measure_dependency_tree) +* [refresh_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#refresh_semantic_model) +* [restore_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#restore_semantic_model) +* [set_semantic_model_storage_format](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_semantic_model_storage_format) +* [translate_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#translate_semantic_model) ### Spark -* 
[create_custom_pool](https://github.com/microsoft/semantic-link-labs) -* [delete_custom_pool](https://github.com/microsoft/semantic-link-labs) -* [get_spark_settings](https://github.com/microsoft/semantic-link-labs) -* [list_custom_pools](https://github.com/microsoft/semantic-link-labs) -* [update_custom_pool](https://github.com/microsoft/semantic-link-labs) -* [update_spark_settings](https://github.com/microsoft/semantic-link-labs) +* [create_custom_pool](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_custom_pool) +* [delete_custom_pool](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#delete_custom_pool) +* [get_spark_settings](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_spark_settings) +* [list_custom_pools](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_custom_pools) +* [update_custom_pool](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_custom_pool) +* [update_spark_settings](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_spark_settings) ### [Tabular Object Model](https://learn.microsoft.com/analysis-services/tom/introduction-to-the-tabular-object-model-tom-in-analysis-services-amo?view=asallproducts-allversions) ([TOM](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.model?view=analysisservices-dotnet)) #### 'Add' functions -* [add_calculated_column](https://github.com/microsoft/semantic-link-labs) -* [add_calculated_table](https://github.com/microsoft/semantic-link-labs) -* [add_calculated_table_column](https://github.com/microsoft/semantic-link-labs) -* [add_calculation_group](https://github.com/microsoft/semantic-link-labs) -* [add_calculation_item](https://github.com/microsoft/semantic-link-labs) -* [add_data_column](https://github.com/microsoft/semantic-link-labs) -* 
[add_entity_partition](https://github.com/microsoft/semantic-link-labs) -* [add_expression](https://github.com/microsoft/semantic-link-labs) -* [add_field_parameter](https://github.com/microsoft/semantic-link-labs) -* [add_hierarchy](https://github.com/microsoft/semantic-link-labs) -* [add_m_partition](https://github.com/microsoft/semantic-link-labs) -* [add_measure](https://github.com/microsoft/semantic-link-labs) -* [add_relationship](https://github.com/microsoft/semantic-link-labs) -* [add_role](https://github.com/microsoft/semantic-link-labs) -* [add_table](https://github.com/microsoft/semantic-link-labs) -* [add_time_intelligence](https://github.com/microsoft/semantic-link-labs) +* [add_calculated_column](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_calculated_column) +* [add_calculated_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_calculated_table) +* [add_calculated_table_column](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_calculated_table_column) +* [add_calculation_group](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_calculation_group) +* [add_calculation_item](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_calculation_item) +* [add_data_column](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_data_column) +* [add_entity_partition](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_entity_partition) +* [add_expression](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_expression) +* [add_field_parameter](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_field_parameter) +* [add_hierarchy](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_hierarchy) +* 
[add_m_partition](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_m_partition) +* [add_measure](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_measure) +* [add_relationship](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_relationship) +* [add_role](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_role) +* [add_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_table) +* [add_time_intelligence](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_time_intelligence) #### 'All' functions -* [all_calculated_columns](https://github.com/microsoft/semantic-link-labs) -* [all_calculated_tables](https://github.com/microsoft/semantic-link-labs) -* [all_calculation_groups](https://github.com/microsoft/semantic-link-labs) -* [all_calculation_items](https://github.com/microsoft/semantic-link-labs) -* [all_columns](https://github.com/microsoft/semantic-link-labs) -* [all_date_tables](https://github.com/microsoft/semantic-link-labs) -* [all_hierarchies](https://github.com/microsoft/semantic-link-labs) -* [all_hybrid_tables](https://github.com/microsoft/semantic-link-labs) -* [all_levels](https://github.com/microsoft/semantic-link-labs) -* [all_measures](https://github.com/microsoft/semantic-link-labs) -* [all_partitions](https://github.com/microsoft/semantic-link-labs) -* [all_rls](https://github.com/microsoft/semantic-link-labs) +* [all_calculated_columns](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_calculated_columns) +* [all_calculated_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_calculated_tables) +* [all_calculation_groups](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_calculation_groups) +* 
[all_calculation_items](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_calculation_items) +* [all_columns](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_columns) +* [all_date_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_date_tables) +* [all_hierarchies](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_hierarchies) +* [all_hybrid_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_hybrid_tables) +* [all_levels](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_levels) +* [all_measures](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_measures) +* [all_partitions](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_partitions) +* [all_rls](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_rls) #### 'Remove' functions -* [remove_alternate_of](https://github.com/microsoft/semantic-link-labs) -* [remove_object](https://github.com/microsoft/semantic-link-labs) -* [remove_sort_by_column](https://github.com/microsoft/semantic-link-labs) +* [remove_alternate_of](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_alternate_of) +* [remove_object](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_object) +* [remove_sort_by_column](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_sort_by_column) #### 'Set' functions -* [set_aggregations](https://github.com/microsoft/semantic-link-labs) -* [set_alternate_of](https://github.com/microsoft/semantic-link-labs) -* [set_data_coverage_definition](https://github.com/microsoft/semantic-link-labs) -* [set_data_type](https://github.com/microsoft/semantic-link-labs) -* 
[set_direct_lake_behavior](https://github.com/microsoft/semantic-link-labs) -* [set_encoding_hint](https://github.com/microsoft/semantic-link-labs) -* [set_is_available_in_mdx](https://github.com/microsoft/semantic-link-labs) -* [set_kpi](https://github.com/microsoft/semantic-link-labs) -* [set_ols](https://github.com/microsoft/semantic-link-labs) -* [set_rls](https://github.com/microsoft/semantic-link-labs) -* [set_sort_by_column](https://github.com/microsoft/semantic-link-labs) -* [set_summarize_by](https://github.com/microsoft/semantic-link-labs) +* [set_aggregations](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_aggregations) +* [set_alternate_of](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_alternate_of) +* [set_data_coverage_definition](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_data_coverage_definition) +* [set_data_type](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_data_type) +* [set_direct_lake_behavior](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_direct_lake_behavior) +* [set_encoding_hint](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_encoding_hint) +* [set_is_available_in_mdx](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_is_available_in_mdx) +* [set_kpi](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_kpi) +* [set_ols](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_ols) +* [set_rls](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_rls) +* [set_sort_by_column](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_sort_by_column) +* [set_summarize_by](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_summarize_by) #### 'Update' 
functions -* [update_calculation_item](https://github.com/microsoft/semantic-link-labs) -* [update_column](https://github.com/microsoft/semantic-link-labs) -* [update_m_partition](https://github.com/microsoft/semantic-link-labs) -* [update_measure](https://github.com/microsoft/semantic-link-labs) -* [update_role](https://github.com/microsoft/semantic-link-labs) +* [update_calculation_item](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_calculation_item) +* [update_column](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_column) +* [update_m_partition](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_m_partition) +* [update_measure](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_measure) +* [update_role](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_role) #### 'Used-in' and dependency functions -* [depends_on](https://github.com/microsoft/semantic-link-labs) -* [fully_qualified_measures](https://github.com/microsoft/semantic-link-labs) -* [referenced_by](https://github.com/microsoft/semantic-link-labs) -* [unqualified_columns](https://github.com/microsoft/semantic-link-labs) -* [used_in_calc_item](https://github.com/microsoft/semantic-link-labs) -* [used_in_data_coverage_definition](https://github.com/microsoft/semantic-link-labs) -* [used_in_hierarchies](https://github.com/microsoft/semantic-link-labs) -* [used_in_levels](https://github.com/microsoft/semantic-link-labs) -* [used_in_relationships](https://github.com/microsoft/semantic-link-labs) -* [used_in_rls](https://github.com/microsoft/semantic-link-labs) -* [used_in_sort_by](https://github.com/microsoft/semantic-link-labs) +* [depends_on](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#depends_on) +* 
[fully_qualified_measures](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#fully_qualified_measures) +* [referenced_by](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#referenced_by) +* [unqualified_columns](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#unqualified_columns) +* [used_in_calc_item](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_in_calc_item) +* [used_in_data_coverage_definition](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_in_data_coverage_definition) +* [used_in_hierarchies](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_in_hierarchies) +* [used_in_levels](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_in_levels) +* [used_in_relationships](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_in_relationships) +* [used_in_rls](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_in_rls) +* [used_in_sort_by](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_in_sort_by) #### Annotations -* [clear_annotations](https://github.com/microsoft/semantic-link-labs) -* [get_annotation_value](https://github.com/microsoft/semantic-link-labs) -* [get_annotations](https://github.com/microsoft/semantic-link-labs) -* [remove_annotation](https://github.com/microsoft/semantic-link-labs) -* [set_annotation](https://github.com/microsoft/semantic-link-labs) +* [clear_annotations](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#clear_annotations) +* [get_annotation_value](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_annotation_value) +* [get_annotations](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_annotations) +* 
[remove_annotation](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_annotation) +* [set_annotation](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_annotation) #### Extended Properties -* [clear_extended_properties](https://github.com/microsoft/semantic-link-labs) -* [get_extended_properties](https://github.com/microsoft/semantic-link-labs) -* [get_extended_property_value](https://github.com/microsoft/semantic-link-labs) -* [remove_extended_property](https://github.com/microsoft/semantic-link-labs) -* [set_extended_property](https://github.com/microsoft/semantic-link-labs) +* [clear_extended_properties](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#clear_extended_properties) +* [get_extended_properties](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_extended_properties) +* [get_extended_property_value](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_extended_property_value) +* [remove_extended_property](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_extended_property) +* [set_extended_property](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_extended_property) #### Incremental Refresh -* [add_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs) -* [apply_refresh_policy](https://github.com/microsoft/semantic-link-labs) -* [has_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs) -* [show_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs) -* [update_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs) +* [add_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_incremental_refresh_policy) +* 
[apply_refresh_policy](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#apply_refresh_policy) +* [has_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#has_incremental_refresh_policy) +* [show_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#show_incremental_refresh_policy) +* [update_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_incremental_refresh_policy) #### Misc functions -* [has_aggs](https://github.com/microsoft/semantic-link-labs) -* [has_date_table](https://github.com/microsoft/semantic-link-labs) -* [has_hybrid_table](https://github.com/microsoft/semantic-link-labs) -* [is_agg_table](https://github.com/microsoft/semantic-link-labs) -* [is_auto_date_table](https://github.com/microsoft/semantic-link-labs) -* [is_calculated_table](https://github.com/microsoft/semantic-link-labs) -* [is_date_table](https://github.com/microsoft/semantic-link-labs) -* [is_direct_lake](https://github.com/microsoft/semantic-link-labs) -* [is_direct_lake_using_view](https://github.com/microsoft/semantic-link-labs) -* [is_field_parameter](https://github.com/microsoft/semantic-link-labs) -* [is_hybrid_table](https://github.com/microsoft/semantic-link-labs) -* [mark_as_date_table](https://github.com/microsoft/semantic-link-labs) +* [has_aggs](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#has_aggs) +* [has_date_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#has_date_table) +* [has_hybrid_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#has_hybrid_table) +* [is_agg_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_agg_table) +* 
[is_auto_date_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_auto_date_table) +* [is_calculated_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_calculated_table) +* [is_date_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_date_table) +* [is_direct_lake](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_direct_lake) +* [is_direct_lake_using_view](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_direct_lake_using_view) +* [is_field_parameter](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_field_parameter) +* [is_hybrid_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_hybrid_table) +* [mark_as_date_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#mark_as_date_table) #### Perspectives -* [add_perspective](https://github.com/microsoft/semantic-link-labs) -* [add_to_perspective](https://github.com/microsoft/semantic-link-labs) -* [in_perspective](https://github.com/microsoft/semantic-link-labs) -* [remove_from_perspective](https://github.com/microsoft/semantic-link-labs) +* [add_perspective](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_perspective) +* [add_to_perspective](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_to_perspective) +* [in_perspective](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#in_perspective) +* [remove_from_perspective](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_from_perspective) #### Translations -* [add_translation](https://github.com/microsoft/semantic-link-labs) -* [remove_translation](https://github.com/microsoft/semantic-link-labs) -* [set_translation](https://github.com/microsoft/semantic-link-labs) 
+* [add_translation](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_translation) +* [remove_translation](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_translation) +* [set_translation](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_translation) #### Vertipaq Stats -* [cardinality](https://github.com/microsoft/semantic-link-labs) -* [data_size](https://github.com/microsoft/semantic-link-labs) -* [dictionary_size](https://github.com/microsoft/semantic-link-labs) -* [records_per_segment](https://github.com/microsoft/semantic-link-labs) -* [remove_vertipaq_annotations](https://github.com/microsoft/semantic-link-labs) -* [row_count](https://github.com/microsoft/semantic-link-labs) -* [set_vertipaq_annotations](https://github.com/microsoft/semantic-link-labs) -* [total_size](https://github.com/microsoft/semantic-link-labs) -* [used_size](https://github.com/microsoft/semantic-link-labs) +* [cardinality](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#cardinality) +* [data_size](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#data_size) +* [dictionary_size](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#dictionary_size) +* [records_per_segment](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#records_per_segment) +* [remove_vertipaq_annotations](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_vertipaq_annotations) +* [row_count](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#row_count) +* [set_vertipaq_annotations](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_vertipaq_annotations) +* [total_size](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#total_size) +* 
[used_size](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_size) --- diff --git a/function_examples.md b/function_examples.md new file mode 100644 index 00000000..1a721bf5 --- /dev/null +++ b/function_examples.md @@ -0,0 +1,2522 @@ +## Function Examples + +### [add_user_to_workspace](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.add_user_to_workspace) +```python +import sempy_labs as labs +labs.add_user_to_workspace( + email_address='', + role_name='', + principal_type=None, + workspace=None, +) +``` + +### [assign_workspace_to_capacity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.assign_workspace_to_capacity) +```python +import sempy_labs as labs +labs.assign_workspace_to_capacity( + capacity_name='', + workspace=None, +) +``` + +### [assign_workspace_to_dataflow_storage](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.assign_workspace_to_dataflow_storage) +```python +import sempy_labs as labs +labs.assign_workspace_to_dataflow_storage( + dataflow_storage_account='', + workspace=None, +) +``` + +### [backup_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.backup_semantic_model) +```python +import sempy_labs as labs +labs.backup_semantic_model( + dataset='', + file_path='', + allow_overwrite=None, + apply_compression=None, + workspace=None, +) +``` + +### [cancel_dataset_refresh](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.cancel_dataset_refresh) +```python +import sempy_labs as labs +labs.cancel_dataset_refresh( + dataset='', + request_id=None, + workspace=None, +) +``` + +### [clear_cache](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.clear_cache) +```python +import sempy_labs as labs +labs.clear_cache( + dataset='', + workspace=None, +) +``` + +### 
[commit_to_git](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.commit_to_git) +```python +import sempy_labs as labs +labs.commit_to_git( + comment='', + item_ids='', + workspace=None, +) +``` + +### [connect_workspace_to_git](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.connect_workspace_to_git) +```python +import sempy_labs as labs +labs.connect_workspace_to_git( + organization_name='', + project_name='', + repository_name='', + branch_name='', + directory_name='', + git_provider_type='', + workspace=None, +) +``` + +### [copy_semantic_model_backup_file](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.copy_semantic_model_backup_file) +```python +import sempy_labs as labs +labs.copy_semantic_model_backup_file( + source_workspace='', + target_workspace='', + source_file_name='', + target_file_name='', + storage_account_url='', + key_vault_uri='', + key_vault_account_key='', + source_file_system=None, + target_file_system=None, +) +``` + +### [create_abfss_path](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_abfss_path) +```python +import sempy_labs as labs +labs.create_abfss_path( + lakehouse_id='', + lakehouse_workspace_id='', + delta_table_name='', +) +``` + +### [create_blank_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_blank_semantic_model) +```python +import sempy_labs as labs +labs.create_blank_semantic_model( + dataset='', + compatibility_level='', + workspace=None, +) +``` + +### [create_custom_pool](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_custom_pool) +```python +import sempy_labs as labs +labs.create_custom_pool( + pool_name='', + node_size='', + min_node_count='', + max_node_count='', + min_executors='', + max_executors='', + node_family=None, + auto_scale_enabled=None, + dynamic_executor_allocation_enabled=None, + 
workspace=None, +) +``` + +### [create_model_bpa_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_model_bpa_semantic_model) +```python +import sempy_labs as labs +labs.create_model_bpa_semantic_model( + dataset=None, + lakehouse=None, + lakehouse_workspace=None, +) +``` + +### [create_relationship_name](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_relationship_name) +```python +import sempy_labs as labs +labs.create_relationship_name( + from_table='', + from_column='', + to_table='', + to_column='', +) +``` + +### [create_semantic_model_from_bim](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_semantic_model_from_bim) +```python +import sempy_labs as labs +labs.create_semantic_model_from_bim( + dataset='', + bim_file='', + workspace=None, +) +``` + +### [create_warehouse](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_warehouse) +```python +import sempy_labs as labs +labs.create_warehouse( + warehouse='', + description=None, + workspace=None, +) +``` + +### [delete_custom_pool](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.delete_custom_pool) +```python +import sempy_labs as labs +labs.delete_custom_pool( + pool_name='', + workspace=None, +) +``` + +### [delete_user_from_workspace](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.delete_user_from_workspace) +```python +import sempy_labs as labs +labs.delete_user_from_workspace( + email_address='', + workspace=None, +) +``` + +### [deploy_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.deploy_semantic_model) +```python +import sempy_labs as labs +labs.deploy_semantic_model( + source_dataset='', + source_workspace=None, + target_dataset=None, + target_workspace=None, + refresh_target_dataset=None, +) +``` + +### 
[deprovision_workspace_identity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.deprovision_workspace_identity) +```python +import sempy_labs as labs +labs.deprovision_workspace_identity( + workspace=None, +) +``` + +### [disable_qso](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.disable_qso) +```python +import sempy_labs as labs +labs.disable_qso( + dataset='', + workspace=None, +) +``` + +### [disconnect_workspace_from_git](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.disconnect_workspace_from_git) +```python +import sempy_labs as labs +labs.disconnect_workspace_from_git( + workspace=None, +) +``` + +### [evaluate_dax_impersonation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.evaluate_dax_impersonation) +```python +import sempy_labs as labs +labs.evaluate_dax_impersonation( + dataset='', + dax_query='', + user_name=None, + workspace=None, +) +``` + +### [export_model_to_onelake](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.export_model_to_onelake) +```python +import sempy_labs as labs +labs.export_model_to_onelake( + dataset='', + workspace=None, + destination_lakehouse=None, + destination_workspace=None, +) +``` + +### [format_dax_object_name](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.format_dax_object_name) +```python +import sempy_labs as labs +labs.format_dax_object_name( + table='', + column='', +) +``` + +### [generate_embedded_filter](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.generate_embedded_filter) +```python +import sempy_labs as labs +labs.generate_embedded_filter( + filter='', +) +``` + +### [get_capacity_id](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_capacity_id) +```python +import sempy_labs as labs +labs.get_capacity_id( + workspace=None, +) +``` + +### 
[get_capacity_name](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_capacity_name) +```python +import sempy_labs as labs +labs.get_capacity_name( + workspace=None, +) +``` + +### [get_direct_lake_sql_endpoint](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_direct_lake_sql_endpoint) +```python +import sempy_labs as labs +labs.get_direct_lake_sql_endpoint( + dataset='', + workspace=None, +) +``` + +### [get_git_connection](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_git_connection) +```python +import sempy_labs as labs +labs.get_git_connection( + workspace=None, +) +``` + +### [get_git_status](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_git_status) +```python +import sempy_labs as labs +labs.get_git_status( + workspace=None, +) +``` + +### [get_measure_dependencies](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_measure_dependencies) +```python +import sempy_labs as labs +labs.get_measure_dependencies( + dataset='', + workspace=None, +) +``` + +### [get_model_calc_dependencies](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_model_calc_dependencies) +```python +import sempy_labs as labs +labs.get_model_calc_dependencies( + dataset='', + workspace=None, +) +``` + +### [get_notebook_definition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_notebook_definition) +```python +import sempy_labs as labs +labs.get_notebook_definition( + notebook_name='', + workspace=None, + decode=None, +) +``` + +### [get_object_level_security](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_object_level_security) +```python +import sempy_labs as labs +labs.get_object_level_security( + dataset='', + workspace=None, +) +``` + +### 
[get_semantic_model_bim](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_semantic_model_bim) +```python +import sempy_labs as labs +labs.get_semantic_model_bim( + dataset='', + workspace=None, + save_to_file_name=None, + lakehouse_workspace=None, +) +``` + +### [get_spark_settings](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_spark_settings) +```python +import sempy_labs as labs +labs.get_spark_settings( + workspace=None, +) +``` + +### [import_notebook_from_web](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_notebook_from_web) +```python +import sempy_labs as labs +labs.import_notebook_from_web( + notebook_name='', + url='', + description=None, + workspace=None, +) +``` + +### [import_vertipaq_analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_vertipaq_analyzer) +```python +import sempy_labs as labs +labs.import_vertipaq_analyzer( + folder_path='', + file_name='', +) +``` + +### [initialize_git_connection](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.initialize_git_connection) +```python +import sempy_labs as labs +labs.initialize_git_connection( + workspace=None, +) +``` + +### [is_default_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.is_default_semantic_model) +```python +import sempy_labs as labs +labs.is_default_semantic_model( + dataset='', + workspace=None, +) +``` + +### [list_capacities](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_capacities) +```python +import sempy_labs as labs +labs.list_capacities( +) +``` + +### [list_custom_pools](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_custom_pools) +```python +import sempy_labs as labs +labs.list_custom_pools( + workspace=None, +) +``` + +### 
[list_dashboards](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_dashboards) +```python +import sempy_labs as labs +labs.list_dashboards( + workspace=None, +) +``` + +### [list_dataflow_storage_accounts](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_dataflow_storage_accounts) +```python +import sempy_labs as labs +labs.list_dataflow_storage_accounts( +) +``` + +### [list_dataflows](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_dataflows) +```python +import sempy_labs as labs +labs.list_dataflows( + workspace=None, +) +``` + +### [list_deployment_pipeline_stage_items](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_deployment_pipeline_stage_items) +```python +import sempy_labs as labs +labs.list_deployment_pipeline_stage_items( + deployment_pipeline='', + stage_name='', +) +``` + +### [list_deployment_pipeline_stages](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_deployment_pipeline_stages) +```python +import sempy_labs as labs +labs.list_deployment_pipeline_stages( + deployment_pipeline='', +) +``` + +### [list_deployment_pipelines](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_deployment_pipelines) +```python +import sempy_labs as labs +labs.list_deployment_pipelines( +) +``` + +### [list_lakehouses](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_lakehouses) +```python +import sempy_labs as labs +labs.list_lakehouses( + workspace=None, +) +``` + +### [list_qso_settings](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_qso_settings) +```python +import sempy_labs as labs +labs.list_qso_settings( + dataset=None, + workspace=None, +) +``` + +### 
[list_reports_using_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_reports_using_semantic_model) +```python +import sempy_labs as labs +labs.list_reports_using_semantic_model( + dataset='', + workspace=None, +) +``` + +### [list_semantic_model_objects](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_semantic_model_objects) +```python +import sempy_labs as labs +labs.list_semantic_model_objects( + dataset='', + workspace=None, +) +``` + +### [list_shortcuts](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_shortcuts) +```python +import sempy_labs as labs +labs.list_shortcuts( + lakehouse=None, + workspace=None, +) +``` + +### [list_warehouses](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_warehouses) +```python +import sempy_labs as labs +labs.list_warehouses( + workspace=None, +) +``` + +### [list_workspace_role_assignments](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_workspace_role_assignments) +```python +import sempy_labs as labs +labs.list_workspace_role_assignments( + workspace=None, +) +``` + +### [list_workspace_users](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_workspace_users) +```python +import sempy_labs as labs +labs.list_workspace_users( + workspace=None, +) +``` + +### [measure_dependency_tree](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.measure_dependency_tree) +```python +import sempy_labs as labs +labs.measure_dependency_tree( + dataset='', + measure_name='', + workspace=None, +) +``` + +### [model_bpa_rules](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.model_bpa_rules) +```python +import sempy_labs as labs +labs.model_bpa_rules( + dataset='', + workspace=None, + dependencies=None, +) +``` + +### 
[provision_workspace_identity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.provision_workspace_identity) +```python +import sempy_labs as labs +labs.provision_workspace_identity( + workspace=None, +) +``` + +### [qso_sync](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.qso_sync) +```python +import sempy_labs as labs +labs.qso_sync( + dataset='', + workspace=None, +) +``` + +### [qso_sync_status](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.qso_sync_status) +```python +import sempy_labs as labs +labs.qso_sync_status( + dataset='', + workspace=None, +) +``` + +### [refresh_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.refresh_semantic_model) +```python +import sempy_labs as labs +labs.refresh_semantic_model( + dataset='', + tables=None, + partitions=None, + refresh_type=None, + retry_count=None, + apply_refresh_policy=None, + max_parallelism=None, + workspace=None, +) +``` + +### [resolve_capacity_name](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_capacity_name) +```python +import sempy_labs as labs +labs.resolve_capacity_name( + capacity_id=None, +) +``` + +### [resolve_dataset_id](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_dataset_id) +```python +import sempy_labs as labs +labs.resolve_dataset_id( + dataset='', + workspace=None, +) +``` + +### [resolve_dataset_name](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_dataset_name) +```python +import sempy_labs as labs +labs.resolve_dataset_name( + dataset_id='', + workspace=None, +) +``` + +### [resolve_item_type](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_item_type) +```python +import sempy_labs as labs +labs.resolve_item_type( + item_id='', + workspace=None, +) +``` + +### 
[resolve_lakehouse_id](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_lakehouse_id) +```python +import sempy_labs as labs +labs.resolve_lakehouse_id( + lakehouse='', + workspace=None, +) +``` + +### [resolve_lakehouse_name](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_lakehouse_name) +```python +import sempy_labs as labs +labs.resolve_lakehouse_name( + lakehouse_id=None, + workspace=None, +) +``` + +### [resolve_report_id](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_report_id) +```python +import sempy_labs as labs +labs.resolve_report_id( + report='', + workspace=None, +) +``` + +### [resolve_report_name](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_report_name) +```python +import sempy_labs as labs +labs.resolve_report_name( + report_id='', + workspace=None, +) +``` + +### [resolve_workspace_capacity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_workspace_capacity) +```python +import sempy_labs as labs +labs.resolve_workspace_capacity( + workspace=None, +) +``` + +### [restore_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.restore_semantic_model) +```python +import sempy_labs as labs +labs.restore_semantic_model( + dataset='', + file_path='', + allow_overwrite=None, + ignore_incompatibilities=None, + force_restore=None, + workspace=None, +) +``` + +### [run_model_bpa](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa) +```python +import sempy_labs as labs +labs.run_model_bpa( + dataset='', + rules=None, + workspace=None, + export=None, + return_dataframe=None, + extended=None, + language=None, +) +``` + +### [run_model_bpa_bulk](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa_bulk) +```python +import sempy_labs as labs 
+labs.run_model_bpa_bulk( + rules=None, + extended=None, + language=None, + workspace=None, + skip_models=None, +) +``` + +### [save_as_delta_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.save_as_delta_table) +```python +import sempy_labs as labs +labs.save_as_delta_table( + dataframe='', + delta_table_name='', + write_mode='', + merge_schema=None, + lakehouse=None, + workspace=None, +) +``` + +### [set_qso](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.set_qso) +```python +import sempy_labs as labs +labs.set_qso( + dataset='', + auto_sync=None, + max_read_only_replicas=None, + workspace=None, +) +``` + +### [set_semantic_model_storage_format](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.set_semantic_model_storage_format) +```python +import sempy_labs as labs +labs.set_semantic_model_storage_format( + dataset='', + storage_format='', + workspace=None, +) +``` + +### [set_workspace_default_storage_format](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.set_workspace_default_storage_format) +```python +import sempy_labs as labs +labs.set_workspace_default_storage_format( + storage_format='', + workspace=None, +) +``` + +### [translate_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.translate_semantic_model) +```python +import sempy_labs as labs +labs.translate_semantic_model( + dataset='', + languages='', + exclude_characters=None, + workspace=None, +) +``` + +### [unassign_workspace_from_capacity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.unassign_workspace_from_capacity) +```python +import sempy_labs as labs +labs.unassign_workspace_from_capacity( + workspace=None, +) +``` + +### [update_custom_pool](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.update_custom_pool) +```python +import sempy_labs as labs 
+labs.update_custom_pool( + pool_name='', + node_size=None, + min_node_count=None, + max_node_count=None, + min_executors=None, + max_executors=None, + node_family=None, + auto_scale_enabled=None, + dynamic_executor_allocation_enabled=None, + workspace=None, +) +``` + +### [update_from_git](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.update_from_git) +```python +import sempy_labs as labs +labs.update_from_git( + remote_commit_hash='', + conflict_resolution_policy='', + workspace_head=None, + allow_override=None, + workspace=None, +) +``` + +### [update_item](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.update_item) +```python +import sempy_labs as labs +labs.update_item( + item_type='', + current_name='', + new_name='', + description=None, + workspace=None, +) +``` + +### [update_spark_settings](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.update_spark_settings) +```python +import sempy_labs as labs +labs.update_spark_settings( + automatic_log_enabled=None, + high_concurrency_enabled=None, + customize_compute_enabled=None, + default_pool_name=None, + max_node_count=None, + max_executors=None, + environment_name=None, + runtime_version=None, + workspace=None, +) +``` + +### [update_workspace_user](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.update_workspace_user) +```python +import sempy_labs as labs +labs.update_workspace_user( + email_address='', + role_name='', + principal_type=None, + workspace=None, +) +``` + +### [vertipaq_analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.vertipaq_analyzer) +```python +import sempy_labs as labs +labs.vertipaq_analyzer( + dataset='', + workspace=None, + export=None, + read_stats_from_data=None, +) +``` + +### 
[add_table_to_direct_lake_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.add_table_to_direct_lake_semantic_model) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.add_table_to_direct_lake_semantic_model( + dataset='', + table_name='', + lakehouse_table_name='', + refresh=None, + workspace=None, +) +``` + +### [check_fallback_reason](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.check_fallback_reason) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.check_fallback_reason( + dataset='', + workspace=None, +) +``` + +### [direct_lake_schema_compare](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.direct_lake_schema_compare) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.direct_lake_schema_compare( + dataset='', + workspace=None, +) +``` + +### [direct_lake_schema_sync](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.direct_lake_schema_sync) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.direct_lake_schema_sync( + dataset='', + workspace=None, + add_to_model=None, +) +``` + +### [generate_direct_lake_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.generate_direct_lake_semantic_model) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.generate_direct_lake_semantic_model( + dataset='', + lakehouse_tables='', + workspace=None, + lakehouse=None, + lakehouse_workspace=None, + overwrite=None, + refresh=None, +) +``` + +### 
[get_direct_lake_guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_direct_lake_guardrails) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.get_direct_lake_guardrails( +) +``` + +### [get_direct_lake_lakehouse](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_direct_lake_lakehouse) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.get_direct_lake_lakehouse( + dataset='', + workspace=None, + lakehouse=None, + lakehouse_workspace=None, +) +``` + +### [get_direct_lake_source](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_direct_lake_source) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.get_direct_lake_source( + dataset='', + workspace=None, +) +``` + +### [get_directlake_guardrails_for_sku](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_directlake_guardrails_for_sku) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.get_directlake_guardrails_for_sku( + sku_size='', +) +``` + +### [get_shared_expression](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_shared_expression) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.get_shared_expression( + lakehouse=None, + workspace=None, +) +``` + +### [get_sku_size](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_sku_size) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.get_sku_size( + workspace=None, +) +``` + +### 
[list_direct_lake_model_calc_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.list_direct_lake_model_calc_tables) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.list_direct_lake_model_calc_tables( + dataset='', + workspace=None, +) +``` + +### [show_unsupported_direct_lake_objects](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.show_unsupported_direct_lake_objects) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.show_unsupported_direct_lake_objects( + dataset='', + workspace=None, +) +``` + +### [update_direct_lake_model_lakehouse_connection](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.update_direct_lake_model_lakehouse_connection) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.update_direct_lake_model_lakehouse_connection( + dataset='', + workspace=None, + lakehouse=None, + lakehouse_workspace=None, +) +``` + +### [update_direct_lake_partition_entity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.update_direct_lake_partition_entity) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.update_direct_lake_partition_entity( + dataset='', + table_name='', + entity_name='', + workspace=None, +) +``` + +### [warm_direct_lake_cache_isresident](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.warm_direct_lake_cache_isresident) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.warm_direct_lake_cache_isresident( + dataset='', + workspace=None, +) +``` + +### 
[warm_direct_lake_cache_perspective](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.warm_direct_lake_cache_perspective) +```python +import sempy_labs as labs +import sempy_labs.directlake as directlake +directlake.warm_direct_lake_cache_perspective( + dataset='', + perspective='', + add_dependencies=None, + workspace=None, +) +``` + +### [create_shortcut_onelake](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.create_shortcut_onelake) +```python +import sempy_labs as labs +import sempy_labs.lakehouse as lake +lake.create_shortcut_onelake( + table_name='', + source_lakehouse='', + source_workspace='', + destination_lakehouse='', + destination_workspace=None, + shortcut_name=None, +) +``` + +### [delete_shortcut](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.delete_shortcut) +```python +import sempy_labs as labs +import sempy_labs.lakehouse as lake +lake.delete_shortcut( + shortcut_name='', + lakehouse=None, + workspace=None, +) +``` + +### [get_lakehouse_columns](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_columns) +```python +import sempy_labs as labs +import sempy_labs.lakehouse as lake +lake.get_lakehouse_columns( + lakehouse=None, + workspace=None, +) +``` + +### [get_lakehouse_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables) +```python +import sempy_labs as labs +import sempy_labs.lakehouse as lake +lake.get_lakehouse_tables( + lakehouse=None, + workspace=None, + extended=None, + count_rows=None, + export=None, +) +``` + +### [lakehouse_attached](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.lakehouse_attached) +```python +import sempy_labs as labs +import sempy_labs.lakehouse as lake 
+lake.lakehouse_attached( +) +``` + +### [optimize_lakehouse_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.optimize_lakehouse_tables) +```python +import sempy_labs as labs +import sempy_labs.lakehouse as lake +lake.optimize_lakehouse_tables( + tables=None, + lakehouse=None, + workspace=None, +) +``` + +### [vacuum_lakehouse_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.vacuum_lakehouse_tables) +```python +import sempy_labs as labs +import sempy_labs.lakehouse as lake +lake.vacuum_lakehouse_tables( + tables=None, + lakehouse=None, + workspace=None, + retain_n_hours=None, +) +``` + +### [create_pqt_file](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.create_pqt_file) +```python +import sempy_labs as labs +import sempy_labs.migration as migration +migration.create_pqt_file( + dataset='', + workspace=None, + file_name=None, +) +``` + +### [migrate_calc_tables_to_lakehouse](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.migrate_calc_tables_to_lakehouse) +```python +import sempy_labs as labs +import sempy_labs.migration as migration +migration.migrate_calc_tables_to_lakehouse( + dataset='', + new_dataset='', + workspace=None, + new_dataset_workspace=None, + lakehouse=None, + lakehouse_workspace=None, +) +``` + +### [migrate_calc_tables_to_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.migrate_calc_tables_to_semantic_model) +```python +import sempy_labs as labs +import sempy_labs.migration as migration +migration.migrate_calc_tables_to_semantic_model( + dataset='', + new_dataset='', + workspace=None, + new_dataset_workspace=None, + lakehouse=None, + lakehouse_workspace=None, +) +``` + +### 
[migrate_field_parameters](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.migrate_field_parameters) +```python +import sempy_labs as labs +import sempy_labs.migration as migration +migration.migrate_field_parameters( + dataset='', + new_dataset='', + workspace=None, + new_dataset_workspace=None, +) +``` + +### [migrate_model_objects_to_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.migrate_model_objects_to_semantic_model) +```python +import sempy_labs as labs +import sempy_labs.migration as migration +migration.migrate_model_objects_to_semantic_model( + dataset='', + new_dataset='', + workspace=None, + new_dataset_workspace=None, +) +``` + +### [migrate_tables_columns_to_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.migrate_tables_columns_to_semantic_model) +```python +import sempy_labs as labs +import sempy_labs.migration as migration +migration.migrate_tables_columns_to_semantic_model( + dataset='', + new_dataset='', + workspace=None, + new_dataset_workspace=None, + lakehouse=None, + lakehouse_workspace=None, +) +``` + +### [migration_validation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.migration_validation) +```python +import sempy_labs as labs +import sempy_labs.migration as migration +migration.migration_validation( + dataset='', + new_dataset='', + workspace=None, + new_dataset_workspace=None, +) +``` + +### [refresh_calc_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.refresh_calc_tables) +```python +import sempy_labs as labs +import sempy_labs.migration as migration +migration.refresh_calc_tables( + dataset='', + workspace=None, +) +``` + +### 
[clone_report](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.clone_report) +```python +import sempy_labs as labs +import sempy_labs.report as rep +rep.clone_report( + report='', + cloned_report='', + workspace=None, + target_workspace=None, + target_dataset=None, + target_dataset_workspace=None, +) +``` + +### [create_model_bpa_report](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.create_model_bpa_report) +```python +import sempy_labs as labs +import sempy_labs.report as rep +rep.create_model_bpa_report( + report=None, + dataset=None, + dataset_workspace=None, +) +``` + +### [create_report_from_reportjson](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.create_report_from_reportjson) +```python +import sempy_labs as labs +import sempy_labs.report as rep +rep.create_report_from_reportjson( + report='', + dataset='', + report_json='', + theme_json=None, + workspace=None, +) +``` + +### [export_report](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.export_report) +```python +import sempy_labs as labs +import sempy_labs.report as rep +rep.export_report( + report='', + export_format='', + file_name=None, + bookmark_name=None, + page_name=None, + visual_name=None, + report_filter=None, + workspace=None, +) +``` + +### [get_report_definition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.get_report_definition) +```python +import sempy_labs as labs +import sempy_labs.report as rep +rep.get_report_definition( + report='', + workspace=None, +) +``` + +### [get_report_json](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.get_report_json) +```python +import sempy_labs as labs +import sempy_labs.report as rep +rep.get_report_json( + report='', + workspace=None, + save_to_file_name=None, +) +``` + +### 
[launch_report](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.launch_report) +```python +import sempy_labs as labs +import sempy_labs.report as rep +rep.launch_report( + report='', + workspace=None, +) +``` + +### [report_rebind](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.report_rebind) +```python +import sempy_labs as labs +import sempy_labs.report as rep +rep.report_rebind( + report='', + dataset='', + report_workspace=None, + dataset_workspace=None, +) +``` + +### [report_rebind_all](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.report_rebind_all) +```python +import sempy_labs as labs +import sempy_labs.report as rep +rep.report_rebind_all( + dataset='', + new_dataset='', + dataset_workspace=None, + new_dataset_workpace=None, + report_workspace=None, +) +``` + +### [update_report_from_reportjson](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.update_report_from_reportjson) +```python +import sempy_labs as labs +import sempy_labs.report as rep +rep.update_report_from_reportjson( + report='', + report_json='', + workspace=None, +) +``` + +### [add_calculated_column](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_calculated_column) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_calculated_column( + table_name='', + column_name='', + expression='', + data_type='', + format_string=None, + hidden=None, + description=None, + display_folder=None, + data_category=None, + key=None, + summarize_by=None, +) +``` + +### [add_calculated_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_calculated_table) +```python +import sempy_labs as labs +from 
sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_calculated_table( + name='', + expression='', + description=None, + data_category=None, + hidden=None, +) +``` + +### [add_calculated_table_column](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_calculated_table_column) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_calculated_table_column( + table_name='', + column_name='', + source_column='', + data_type='', + format_string=None, + hidden=None, + description=None, + display_folder=None, + data_category=None, + key=None, + summarize_by=None, +) +``` + +### [add_calculation_group](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_calculation_group) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_calculation_group( + name='', + precedence='', + description=None, + hidden=None, +) +``` + +### [add_calculation_item](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_calculation_item) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_calculation_item( + table_name='', + calculation_item_name='', + expression='', + ordinal=None, + description=None, + format_string_expression=None, +) +``` + +### [add_data_column](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_data_column) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', 
workspace='', readonly=True) as tom: + tom.add_data_column( + table_name='', + column_name='', + source_column='', + data_type='', + format_string=None, + hidden=None, + description=None, + display_folder=None, + data_category=None, + key=None, + summarize_by=None, +) +``` + +### [add_entity_partition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_entity_partition) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_entity_partition( + table_name='', + entity_name='', + expression=None, + description=None, +) +``` + +### [add_expression](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_expression) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_expression( + name='', + expression='', + description=None, +) +``` + +### [add_field_parameter](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_field_parameter) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_field_parameter( + table_name='', + objects='', + object_names='', +) +``` + +### [add_hierarchy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_hierarchy) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_hierarchy( + table_name='', + hierarchy_name='', + columns='', + levels=None, + hierarchy_description=None, + hierarchy_hidden=None, +) +``` + +### 
[add_incremental_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_incremental_refresh_policy) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_incremental_refresh_policy( + table_name='', + column_name='', + start_date='', + end_date='', + incremental_granularity='', + incremental_periods='', + rolling_window_granularity='', + rolling_window_periods='', + only_refresh_complete_days=None, + detect_data_changes_column=None, +) +``` + +### [add_m_partition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_m_partition) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_m_partition( + table_name='', + partition_name='', + expression='', + mode=None, + description=None, +) +``` + +### [add_measure](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_measure) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_measure( + table_name='', + measure_name='', + expression='', + format_string=None, + hidden=None, + description=None, + display_folder=None, + format_string_expression=None, +) +``` + +### [add_perspective](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_perspective) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_perspective( + perspective_name='', +) +``` + +### 
[add_relationship](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_relationship) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_relationship( + from_table='', + from_column='', + to_table='', + to_column='', + from_cardinality='', + to_cardinality='', + cross_filtering_behavior=None, + is_active=None, + security_filtering_behavior=None, + rely_on_referential_integrity=None, +) +``` + +### [add_role](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_role) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_role( + role_name='', + model_permission=None, + description=None, +) +``` + +### [add_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_table) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_table( + name='', + description=None, + data_category=None, + hidden=None, +) +``` + +### [add_time_intelligence](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_time_intelligence) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_time_intelligence( + measure_name='', + date_table='', + time_intel='', +) +``` + +### [add_to_perspective](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_to_perspective) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with 
connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_to_perspective( + object='', + perspective_name='', +) +``` + +### [add_translation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_translation) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.add_translation( + language='', +) +``` + +### [all_calculated_columns](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_calculated_columns) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.all_calculated_columns( +) +``` + +### [all_calculated_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_calculated_tables) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.all_calculated_tables( +) +``` + +### [all_calculation_groups](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_calculation_groups) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.all_calculation_groups( +) +``` + +### [all_calculation_items](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_calculation_items) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.all_calculation_items( +) +``` + +### 
[all_columns](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_columns) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.all_columns( +) +``` + +### [all_date_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_date_tables) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.all_date_tables( +) +``` + +### [all_hierarchies](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_hierarchies) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.all_hierarchies( +) +``` + +### [all_hybrid_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_hybrid_tables) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.all_hybrid_tables( +) +``` + +### [all_levels](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_levels) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.all_levels( +) +``` + +### [all_measures](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_measures) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.all_measures( +) +``` + +### 
[all_partitions](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_partitions) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.all_partitions( +) +``` + +### [all_rls](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_rls) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.all_rls( +) +``` + +### [apply_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.apply_refresh_policy) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.apply_refresh_policy( + table_name='', + effective_date=None, + refresh=None, + max_parallelism=None, +) +``` + +### [cardinality](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.cardinality) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.cardinality( + column='', +) +``` + +### [clear_annotations](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.clear_annotations) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.clear_annotations( + object='', +) +``` + +### [clear_extended_properties](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.clear_extended_properties) +```python +import sempy_labs as labs +from sempy_labs.tom import 
connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.clear_extended_properties( + object='', +) +``` + +### [data_size](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.data_size) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.data_size( + column='', +) +``` + +### [depends_on](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.depends_on) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.depends_on( + object='', + dependencies='', +) +``` + +### [dictionary_size](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.dictionary_size) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.dictionary_size( + column='', +) +``` + +### [fully_qualified_measures](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.fully_qualified_measures) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.fully_qualified_measures( + object='', + dependencies='', +) +``` + +### [get_annotation_value](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.get_annotation_value) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.get_annotation_value( + object='', + name='', +) +``` + +### 
[get_annotations](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.get_annotations) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.get_annotations( + object='', +) +``` + +### [get_extended_properties](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.get_extended_properties) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.get_extended_properties( + object='', +) +``` + +### [get_extended_property_value](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.get_extended_property_value) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.get_extended_property_value( + object='', + name='', +) +``` + +### [has_aggs](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_aggs) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.has_aggs( +) +``` + +### [has_date_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_date_table) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.has_date_table( +) +``` + +### [has_hybrid_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_hybrid_table) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model 
+with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.has_hybrid_table( +) +``` + +### [has_incremental_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_incremental_refresh_policy) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.has_incremental_refresh_policy( + table_name='', +) +``` + +### [in_perspective](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.in_perspective) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.in_perspective( + object='', + perspective_name='', +) +``` + +### [is_agg_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_agg_table) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.is_agg_table( + table_name='', +) +``` + +### [is_auto_date_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_auto_date_table) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.is_auto_date_table( + table_name='', +) +``` + +### [is_calculated_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_calculated_table) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.is_calculated_table( + table_name='', +) +``` + +### 
[is_date_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_date_table) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.is_date_table( + table_name='', +) +``` + +### [is_direct_lake](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_direct_lake) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.is_direct_lake( +) +``` + +### [is_direct_lake_using_view](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_direct_lake_using_view) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.is_direct_lake_using_view( +) +``` + +### [is_field_parameter](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_field_parameter) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.is_field_parameter( + table_name='', +) +``` + +### [is_hybrid_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_hybrid_table) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.is_hybrid_table( + table_name='', +) +``` + +### [mark_as_date_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.mark_as_date_table) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model 
+with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.mark_as_date_table( + table_name='', + column_name='', +) +``` + +### [records_per_segment](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.records_per_segment) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.records_per_segment( + object='', +) +``` + +### [referenced_by](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.referenced_by) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.referenced_by( + object='', + dependencies='', +) +``` + +### [remove_alternate_of](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_alternate_of) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.remove_alternate_of( + table_name='', + column_name='', +) +``` + +### [remove_annotation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_annotation) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.remove_annotation( + object='', + name='', +) +``` + +### [remove_extended_property](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_extended_property) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.remove_extended_property( + object='', + 
name='', +) +``` + +### [remove_from_perspective](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_from_perspective) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.remove_from_perspective( + object='', + perspective_name='', +) +``` + +### [remove_object](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_object) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.remove_object( + object='', +) +``` + +### [remove_sort_by_column](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_sort_by_column) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.remove_sort_by_column( + table_name='', + column_name='', +) +``` + +### [remove_translation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_translation) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.remove_translation( + object='', + language='', +) +``` + +### [remove_vertipaq_annotations](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_vertipaq_annotations) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.remove_vertipaq_annotations( +) +``` + +### 
[row_count](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.row_count) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.row_count( + object='', +) +``` + +### [set_aggregations](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_aggregations) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_aggregations( + table_name='', + agg_table_name='', +) +``` + +### [set_alternate_of](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_alternate_of) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_alternate_of( + table_name='', + column_name='', + summarization_type='', + base_table='', + base_column=None, +) +``` + +### [set_annotation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_annotation) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_annotation( + object='', + name='', + value='', +) +``` + +### [set_data_coverage_definition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_data_coverage_definition) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_data_coverage_definition( + table_name='', + partition_name='', + expression='', +) +``` + +### 
[set_data_type](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_data_type) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_data_type( + table_name='', + column_name='', + value='', +) +``` + +### [set_direct_lake_behavior](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_direct_lake_behavior) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_direct_lake_behavior( + direct_lake_behavior='', +) +``` + +### [set_encoding_hint](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_encoding_hint) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_encoding_hint( + table_name='', + column_name='', + value='', +) +``` + +### [set_extended_property](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_extended_property) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_extended_property( + object='', + extended_property_type='', + name='', + value='', +) +``` + +### [set_is_available_in_mdx](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_is_available_in_mdx) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_is_available_in_mdx( + table_name='', + column_name='', + value=None, +) +``` + +### 
[set_kpi](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_kpi) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_kpi( + measure_name='', + target='', + lower_bound='', + upper_bound='', + lower_mid_bound=None, + upper_mid_bound=None, + status_type=None, + status_graphic=None, +) +``` + +### [set_ols](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_ols) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_ols( + role_name='', + table_name='', + column_name='', + permission='', +) +``` + +### [set_rls](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_rls) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_rls( + role_name='', + table_name='', + filter_expression='', +) +``` + +### [set_sort_by_column](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_sort_by_column) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_sort_by_column( + table_name='', + column_name='', + sort_by_column='', +) +``` + +### [set_summarize_by](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_summarize_by) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_summarize_by( + table_name='', + column_name='', + 
value=None, +) +``` + +### [set_translation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_translation) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_translation( + object='', + language='', + property='', + value='', +) +``` + +### [set_vertipaq_annotations](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_vertipaq_annotations) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.set_vertipaq_annotations( +) +``` + +### [show_incremental_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.show_incremental_refresh_policy) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.show_incremental_refresh_policy( + table_name='', +) +``` + +### [total_size](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.total_size) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.total_size( + object='', +) +``` + +### [unqualified_columns](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.unqualified_columns) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.unqualified_columns( + object='', + dependencies='', +) +``` + +### 
[update_calculation_item](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.update_calculation_item) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.update_calculation_item( + table_name='', + calculation_item_name='', + expression=None, + ordinal=None, + description=None, + format_string_expression=None, +) +``` + +### [update_column](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.update_column) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.update_column( + table_name='', + column_name='', + source_column=None, + data_type=None, + expression=None, + format_string=None, + hidden=None, + description=None, + display_folder=None, + data_category=None, + key=None, + summarize_by=None, +) +``` + +### [update_incremental_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.update_incremental_refresh_policy) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.update_incremental_refresh_policy( + table_name='', + incremental_granularity='', + incremental_periods='', + rolling_window_granularity='', + rolling_window_periods='', + only_refresh_complete_days=None, + detect_data_changes_column=None, +) +``` + +### [update_m_partition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.update_m_partition) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.update_m_partition( + 
table_name='', + partition_name='', + expression=None, + mode=None, + description=None, +) +``` + +### [update_measure](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.update_measure) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.update_measure( + measure_name='', + expression=None, + format_string=None, + hidden=None, + description=None, + display_folder=None, + format_string_expression=None, +) +``` + +### [update_role](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.update_role) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.update_role( + role_name='', + model_permission=None, + description=None, +) +``` + +### [used_in_calc_item](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_in_calc_item) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.used_in_calc_item( + object='', + dependencies='', +) +``` + +### [used_in_data_coverage_definition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_in_data_coverage_definition) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.used_in_data_coverage_definition( + object='', + dependencies='', +) +``` + +### [used_in_hierarchies](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_in_hierarchies) +```python +import sempy_labs as labs +from sempy_labs.tom import 
connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.used_in_hierarchies( + column='', +) +``` + +### [used_in_levels](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_in_levels) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.used_in_levels( + column='', +) +``` + +### [used_in_relationships](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_in_relationships) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.used_in_relationships( + object='', +) +``` + +### [used_in_rls](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_in_rls) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.used_in_rls( + object='', + dependencies='', +) +``` + +### [used_in_sort_by](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_in_sort_by) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.used_in_sort_by( + column='', +) +``` + +### [used_size](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_size) +```python +import sempy_labs as labs +from sempy_labs.tom import connect_semantic_model +with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: + tom.used_size( + object='', +) +``` From 05d62ed92162fbbd3b9b06ffd80c48c1e8ba2440 Mon Sep 17 00:00:00 2001 From: Michael Date: 
Thu, 5 Sep 2024 13:04:53 +0300 Subject: [PATCH 03/18] added logic for parameters --- .github/workflows/_generate_func_examples.py | 45 +- function_examples.md | 934 +++++++++---------- notebooks/Tabular Object Model.ipynb | 2 +- src/sempy_labs/_generate_semantic_model.py | 2 +- src/sempy_labs/tom/_model.py | 4 +- 5 files changed, 496 insertions(+), 491 deletions(-) diff --git a/.github/workflows/_generate_func_examples.py b/.github/workflows/_generate_func_examples.py index 91fffab5..9901a048 100644 --- a/.github/workflows/_generate_func_examples.py +++ b/.github/workflows/_generate_func_examples.py @@ -46,20 +46,51 @@ markdown_example += f"\n{tab}{func_print}" else: markdown_example += f"\n{func_print}" + params = [param for param_name, param in sig.parameters.items() if param_name not in ['kwargs', 'self']] + param_count = len(params) for param_name, param in sig.parameters.items(): + is_optional = False if param_name not in ['kwargs', 'self']: - param_value = "''" - param_type = param.annotation if param.annotation != inspect._empty else "Unknown" - if typing.get_origin(param_type) is typing.Union: - args = typing.get_args(param_type) - if type(None) in args: - param_value = 'None' + param_value = '' + if param.default != inspect.Parameter.empty: + param_value = param.default + is_optional = True + elif param_name == 'dataset': + param_value = "AdvWorks" + elif param_name in ['email_address', 'user_name']: + param_value = 'hello@goodbye.com' + elif param_name == 'languages': + param_value = ['it-IT', 'zh-CN'] + elif param_name == 'dax_query': + param_value = 'EVALUATE SUMMARIZECOLUMNS("MyMeasure", 1)' + elif param_name == 'column': + param_value = 'tom.model.Tables["Geography"].Columns["GeographyKey"]' + elif param_name in ['object']: + if attr_name in ['row_count', 'total_size', 'used_in_relationships', 'used_in_rls', 'set_translation']: + param_value = 'tom.model.Tables["Sales"]' + elif attr_name in ['records_per_segment']: + param_value = 
'tom.model.Tables["Sales"].Partitions["Sales"]' + elif attr_name in ['used_size']: + param_value = 'tom.model.Tables["Geography"].Hierarchies["Geo Hierarchy"]' + elif attr_name in ['fully_qualified_measures']: + param_value = 'tom.model.Tables["Sales"].Measures["Sales Amount"]' + elif param_name == 'dependencies': + param_value = 'labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace)' + + if param_value not in [None, True, False] and not isinstance(param_value, list) and param_name not in ['object', 'column', 'dependencies']: + param_value = f"'{param_value}'" p = f"{tab}{param_name}={param_value}," + if is_optional: + p += " # This parameter is optional" if d_alias == 'tom': markdown_example += f"\n{tab}{p}" else: markdown_example += f"\n{p}" - markdown_example += '\n)\n```\n' + closing = ")\n```\n" + if param_count == 0: + markdown_example += closing + else: + markdown_example += f"\n{closing}" output_path = os.path.join('/root/semantic-link-labs', 'function_examples.md') with open(output_path, 'w') as f: diff --git a/function_examples.md b/function_examples.md index 1a721bf5..08fde774 100644 --- a/function_examples.md +++ b/function_examples.md @@ -4,10 +4,10 @@ ```python import sempy_labs as labs labs.add_user_to_workspace( - email_address='', + email_address='hello@goodbye.com', role_name='', - principal_type=None, - workspace=None, + principal_type='User', # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -16,7 +16,7 @@ labs.add_user_to_workspace( import sempy_labs as labs labs.assign_workspace_to_capacity( capacity_name='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -25,7 +25,7 @@ labs.assign_workspace_to_capacity( import sempy_labs as labs labs.assign_workspace_to_dataflow_storage( dataflow_storage_account='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -33,11 +33,11 @@ labs.assign_workspace_to_dataflow_storage( ```python import 
sempy_labs as labs labs.backup_semantic_model( - dataset='', + dataset='AdvWorks', file_path='', - allow_overwrite=None, - apply_compression=None, - workspace=None, + allow_overwrite=True, # This parameter is optional + apply_compression=True, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -45,9 +45,9 @@ labs.backup_semantic_model( ```python import sempy_labs as labs labs.cancel_dataset_refresh( - dataset='', - request_id=None, - workspace=None, + dataset='AdvWorks', + request_id=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -55,8 +55,8 @@ labs.cancel_dataset_refresh( ```python import sempy_labs as labs labs.clear_cache( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -65,8 +65,8 @@ labs.clear_cache( import sempy_labs as labs labs.commit_to_git( comment='', - item_ids='', - workspace=None, + item_ids=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -79,8 +79,8 @@ labs.connect_workspace_to_git( repository_name='', branch_name='', directory_name='', - git_provider_type='', - workspace=None, + git_provider_type='AzureDevOps', # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -95,8 +95,8 @@ labs.copy_semantic_model_backup_file( storage_account_url='', key_vault_uri='', key_vault_account_key='', - source_file_system=None, - target_file_system=None, + source_file_system='power-bi-backup', # This parameter is optional + target_file_system='power-bi-backup', # This parameter is optional ) ``` @@ -114,9 +114,9 @@ labs.create_abfss_path( ```python import sempy_labs as labs labs.create_blank_semantic_model( - dataset='', - compatibility_level='', - workspace=None, + dataset='AdvWorks', + compatibility_level='1605', # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -130,10 +130,10 @@ labs.create_custom_pool( 
max_node_count='', min_executors='', max_executors='', - node_family=None, - auto_scale_enabled=None, - dynamic_executor_allocation_enabled=None, - workspace=None, + node_family='MemoryOptimized', # This parameter is optional + auto_scale_enabled=True, # This parameter is optional + dynamic_executor_allocation_enabled=True, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -141,9 +141,9 @@ labs.create_custom_pool( ```python import sempy_labs as labs labs.create_model_bpa_semantic_model( - dataset=None, - lakehouse=None, - lakehouse_workspace=None, + dataset='ModelBPA', # This parameter is optional + lakehouse=None, # This parameter is optional + lakehouse_workspace=None, # This parameter is optional ) ``` @@ -162,9 +162,9 @@ labs.create_relationship_name( ```python import sempy_labs as labs labs.create_semantic_model_from_bim( - dataset='', + dataset='AdvWorks', bim_file='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -173,8 +173,8 @@ labs.create_semantic_model_from_bim( import sempy_labs as labs labs.create_warehouse( warehouse='', - description=None, - workspace=None, + description=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -183,7 +183,7 @@ labs.create_warehouse( import sempy_labs as labs labs.delete_custom_pool( pool_name='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -191,8 +191,8 @@ labs.delete_custom_pool( ```python import sempy_labs as labs labs.delete_user_from_workspace( - email_address='', - workspace=None, + email_address='hello@goodbye.com', + workspace=None, # This parameter is optional ) ``` @@ -201,10 +201,10 @@ labs.delete_user_from_workspace( import sempy_labs as labs labs.deploy_semantic_model( source_dataset='', - source_workspace=None, - target_dataset=None, - target_workspace=None, - refresh_target_dataset=None, + source_workspace=None, # This parameter is optional + target_dataset=None, # This 
parameter is optional + target_workspace=None, # This parameter is optional + refresh_target_dataset=True, # This parameter is optional ) ``` @@ -212,7 +212,7 @@ labs.deploy_semantic_model( ```python import sempy_labs as labs labs.deprovision_workspace_identity( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -220,8 +220,8 @@ labs.deprovision_workspace_identity( ```python import sempy_labs as labs labs.disable_qso( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -229,7 +229,7 @@ labs.disable_qso( ```python import sempy_labs as labs labs.disconnect_workspace_from_git( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -237,10 +237,10 @@ labs.disconnect_workspace_from_git( ```python import sempy_labs as labs labs.evaluate_dax_impersonation( - dataset='', - dax_query='', - user_name=None, - workspace=None, + dataset='AdvWorks', + dax_query='EVALUATE SUMMARIZECOLUMNS("MyMeasure", 1)', + user_name=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -248,10 +248,10 @@ labs.evaluate_dax_impersonation( ```python import sempy_labs as labs labs.export_model_to_onelake( - dataset='', - workspace=None, - destination_lakehouse=None, - destination_workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional + destination_lakehouse=None, # This parameter is optional + destination_workspace=None, # This parameter is optional ) ``` @@ -260,7 +260,7 @@ labs.export_model_to_onelake( import sempy_labs as labs labs.format_dax_object_name( table='', - column='', + column=tom.model.Tables["Geography"].Columns["GeographyKey"], ) ``` @@ -276,7 +276,7 @@ labs.generate_embedded_filter( ```python import sempy_labs as labs labs.get_capacity_id( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -284,7 +284,7 @@ labs.get_capacity_id( ```python import sempy_labs as labs labs.get_capacity_name( - 
workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -292,8 +292,8 @@ labs.get_capacity_name( ```python import sempy_labs as labs labs.get_direct_lake_sql_endpoint( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -301,7 +301,7 @@ labs.get_direct_lake_sql_endpoint( ```python import sempy_labs as labs labs.get_git_connection( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -309,7 +309,7 @@ labs.get_git_connection( ```python import sempy_labs as labs labs.get_git_status( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -317,8 +317,8 @@ labs.get_git_status( ```python import sempy_labs as labs labs.get_measure_dependencies( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -326,8 +326,8 @@ labs.get_measure_dependencies( ```python import sempy_labs as labs labs.get_model_calc_dependencies( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -336,8 +336,8 @@ labs.get_model_calc_dependencies( import sempy_labs as labs labs.get_notebook_definition( notebook_name='', - workspace=None, - decode=None, + workspace=None, # This parameter is optional + decode=True, # This parameter is optional ) ``` @@ -345,8 +345,8 @@ labs.get_notebook_definition( ```python import sempy_labs as labs labs.get_object_level_security( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -354,10 +354,10 @@ labs.get_object_level_security( ```python import sempy_labs as labs labs.get_semantic_model_bim( - dataset='', - workspace=None, - save_to_file_name=None, - lakehouse_workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional + save_to_file_name=None, # This parameter is optional + lakehouse_workspace=None, # This parameter is optional ) ``` @@ -365,7 +365,7 @@ 
labs.get_semantic_model_bim( ```python import sempy_labs as labs labs.get_spark_settings( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -375,8 +375,8 @@ import sempy_labs as labs labs.import_notebook_from_web( notebook_name='', url='', - description=None, - workspace=None, + description=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -393,7 +393,7 @@ labs.import_vertipaq_analyzer( ```python import sempy_labs as labs labs.initialize_git_connection( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -401,23 +401,22 @@ labs.initialize_git_connection( ```python import sempy_labs as labs labs.is_default_semantic_model( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` ### [list_capacities](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_capacities) ```python import sempy_labs as labs -labs.list_capacities( -) +labs.list_capacities() ``` ### [list_custom_pools](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_custom_pools) ```python import sempy_labs as labs labs.list_custom_pools( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -425,22 +424,21 @@ labs.list_custom_pools( ```python import sempy_labs as labs labs.list_dashboards( - workspace=None, + workspace=None, # This parameter is optional ) ``` ### [list_dataflow_storage_accounts](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_dataflow_storage_accounts) ```python import sempy_labs as labs -labs.list_dataflow_storage_accounts( -) +labs.list_dataflow_storage_accounts() ``` ### [list_dataflows](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_dataflows) ```python import sempy_labs as labs labs.list_dataflows( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -464,15 +462,14 @@ 
labs.list_deployment_pipeline_stages( ### [list_deployment_pipelines](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_deployment_pipelines) ```python import sempy_labs as labs -labs.list_deployment_pipelines( -) +labs.list_deployment_pipelines() ``` ### [list_lakehouses](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_lakehouses) ```python import sempy_labs as labs labs.list_lakehouses( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -480,8 +477,8 @@ labs.list_lakehouses( ```python import sempy_labs as labs labs.list_qso_settings( - dataset=None, - workspace=None, + dataset=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -489,8 +486,8 @@ labs.list_qso_settings( ```python import sempy_labs as labs labs.list_reports_using_semantic_model( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -498,8 +495,8 @@ labs.list_reports_using_semantic_model( ```python import sempy_labs as labs labs.list_semantic_model_objects( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -507,8 +504,8 @@ labs.list_semantic_model_objects( ```python import sempy_labs as labs labs.list_shortcuts( - lakehouse=None, - workspace=None, + lakehouse=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -516,7 +513,7 @@ labs.list_shortcuts( ```python import sempy_labs as labs labs.list_warehouses( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -524,7 +521,7 @@ labs.list_warehouses( ```python import sempy_labs as labs labs.list_workspace_role_assignments( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -532,7 +529,7 @@ labs.list_workspace_role_assignments( ```python import sempy_labs as labs labs.list_workspace_users( - workspace=None, + workspace=None, # This 
parameter is optional ) ``` @@ -540,9 +537,9 @@ labs.list_workspace_users( ```python import sempy_labs as labs labs.measure_dependency_tree( - dataset='', + dataset='AdvWorks', measure_name='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -550,9 +547,9 @@ labs.measure_dependency_tree( ```python import sempy_labs as labs labs.model_bpa_rules( - dataset='', - workspace=None, - dependencies=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional + dependencies=None, # This parameter is optional ) ``` @@ -560,7 +557,7 @@ labs.model_bpa_rules( ```python import sempy_labs as labs labs.provision_workspace_identity( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -568,8 +565,8 @@ labs.provision_workspace_identity( ```python import sempy_labs as labs labs.qso_sync( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -577,8 +574,8 @@ labs.qso_sync( ```python import sempy_labs as labs labs.qso_sync_status( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -586,14 +583,14 @@ labs.qso_sync_status( ```python import sempy_labs as labs labs.refresh_semantic_model( - dataset='', - tables=None, - partitions=None, - refresh_type=None, - retry_count=None, - apply_refresh_policy=None, - max_parallelism=None, - workspace=None, + dataset='AdvWorks', + tables=None, # This parameter is optional + partitions=None, # This parameter is optional + refresh_type=None, # This parameter is optional + retry_count=0, # This parameter is optional + apply_refresh_policy=True, # This parameter is optional + max_parallelism='10', # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -601,7 +598,7 @@ labs.refresh_semantic_model( ```python import sempy_labs as labs labs.resolve_capacity_name( - capacity_id=None, + capacity_id=None, # This parameter is optional ) ``` @@ -609,8 +606,8 @@ 
labs.resolve_capacity_name( ```python import sempy_labs as labs labs.resolve_dataset_id( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -619,7 +616,7 @@ labs.resolve_dataset_id( import sempy_labs as labs labs.resolve_dataset_name( dataset_id='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -628,7 +625,7 @@ labs.resolve_dataset_name( import sempy_labs as labs labs.resolve_item_type( item_id='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -637,7 +634,7 @@ labs.resolve_item_type( import sempy_labs as labs labs.resolve_lakehouse_id( lakehouse='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -645,8 +642,8 @@ labs.resolve_lakehouse_id( ```python import sempy_labs as labs labs.resolve_lakehouse_name( - lakehouse_id=None, - workspace=None, + lakehouse_id=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -655,7 +652,7 @@ labs.resolve_lakehouse_name( import sempy_labs as labs labs.resolve_report_id( report='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -664,7 +661,7 @@ labs.resolve_report_id( import sempy_labs as labs labs.resolve_report_name( report_id='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -672,7 +669,7 @@ labs.resolve_report_name( ```python import sempy_labs as labs labs.resolve_workspace_capacity( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -680,12 +677,12 @@ labs.resolve_workspace_capacity( ```python import sempy_labs as labs labs.restore_semantic_model( - dataset='', + dataset='AdvWorks', file_path='', - allow_overwrite=None, - ignore_incompatibilities=None, - force_restore=None, - workspace=None, + allow_overwrite=True, # This parameter is optional + ignore_incompatibilities=True, # This parameter is optional + force_restore=False, # This parameter is optional + workspace=None, # 
This parameter is optional ) ``` @@ -693,13 +690,13 @@ labs.restore_semantic_model( ```python import sempy_labs as labs labs.run_model_bpa( - dataset='', - rules=None, - workspace=None, - export=None, - return_dataframe=None, - extended=None, - language=None, + dataset='AdvWorks', + rules=None, # This parameter is optional + workspace=None, # This parameter is optional + export=False, # This parameter is optional + return_dataframe=False, # This parameter is optional + extended=False, # This parameter is optional + language=None, # This parameter is optional ) ``` @@ -707,11 +704,11 @@ labs.run_model_bpa( ```python import sempy_labs as labs labs.run_model_bpa_bulk( - rules=None, - extended=None, - language=None, - workspace=None, - skip_models=None, + rules=None, # This parameter is optional + extended=False, # This parameter is optional + language=None, # This parameter is optional + workspace=None, # This parameter is optional + skip_models=['ModelBPA', 'Fabric Capacity Metrics'], # This parameter is optional ) ``` @@ -722,9 +719,9 @@ labs.save_as_delta_table( dataframe='', delta_table_name='', write_mode='', - merge_schema=None, - lakehouse=None, - workspace=None, + merge_schema=False, # This parameter is optional + lakehouse=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -732,10 +729,10 @@ labs.save_as_delta_table( ```python import sempy_labs as labs labs.set_qso( - dataset='', - auto_sync=None, - max_read_only_replicas=None, - workspace=None, + dataset='AdvWorks', + auto_sync=True, # This parameter is optional + max_read_only_replicas='-1', # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -743,9 +740,9 @@ labs.set_qso( ```python import sempy_labs as labs labs.set_semantic_model_storage_format( - dataset='', + dataset='AdvWorks', storage_format='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -754,7 +751,7 @@ labs.set_semantic_model_storage_format( 
import sempy_labs as labs labs.set_workspace_default_storage_format( storage_format='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -762,10 +759,10 @@ labs.set_workspace_default_storage_format( ```python import sempy_labs as labs labs.translate_semantic_model( - dataset='', - languages='', - exclude_characters=None, - workspace=None, + dataset='AdvWorks', + languages=['it-IT', 'zh-CN'], + exclude_characters=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -773,7 +770,7 @@ labs.translate_semantic_model( ```python import sempy_labs as labs labs.unassign_workspace_from_capacity( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -782,15 +779,15 @@ labs.unassign_workspace_from_capacity( import sempy_labs as labs labs.update_custom_pool( pool_name='', - node_size=None, - min_node_count=None, - max_node_count=None, - min_executors=None, - max_executors=None, - node_family=None, - auto_scale_enabled=None, - dynamic_executor_allocation_enabled=None, - workspace=None, + node_size=None, # This parameter is optional + min_node_count=None, # This parameter is optional + max_node_count=None, # This parameter is optional + min_executors=None, # This parameter is optional + max_executors=None, # This parameter is optional + node_family=None, # This parameter is optional + auto_scale_enabled=None, # This parameter is optional + dynamic_executor_allocation_enabled=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -800,9 +797,9 @@ import sempy_labs as labs labs.update_from_git( remote_commit_hash='', conflict_resolution_policy='', - workspace_head=None, - allow_override=None, - workspace=None, + workspace_head=None, # This parameter is optional + allow_override=False, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -813,8 +810,8 @@ labs.update_item( item_type='', current_name='', new_name='', - description=None, 
- workspace=None, + description=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -822,15 +819,15 @@ labs.update_item( ```python import sempy_labs as labs labs.update_spark_settings( - automatic_log_enabled=None, - high_concurrency_enabled=None, - customize_compute_enabled=None, - default_pool_name=None, - max_node_count=None, - max_executors=None, - environment_name=None, - runtime_version=None, - workspace=None, + automatic_log_enabled=None, # This parameter is optional + high_concurrency_enabled=None, # This parameter is optional + customize_compute_enabled=None, # This parameter is optional + default_pool_name=None, # This parameter is optional + max_node_count=None, # This parameter is optional + max_executors=None, # This parameter is optional + environment_name=None, # This parameter is optional + runtime_version=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -838,10 +835,10 @@ labs.update_spark_settings( ```python import sempy_labs as labs labs.update_workspace_user( - email_address='', + email_address='hello@goodbye.com', role_name='', - principal_type=None, - workspace=None, + principal_type='User', # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -849,10 +846,10 @@ labs.update_workspace_user( ```python import sempy_labs as labs labs.vertipaq_analyzer( - dataset='', - workspace=None, - export=None, - read_stats_from_data=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional + export=None, # This parameter is optional + read_stats_from_data=False, # This parameter is optional ) ``` @@ -861,11 +858,11 @@ labs.vertipaq_analyzer( import sempy_labs as labs import sempy_labs.directlake as directlake directlake.add_table_to_direct_lake_semantic_model( - dataset='', + dataset='AdvWorks', table_name='', lakehouse_table_name='', - refresh=None, - workspace=None, + refresh=True, # This parameter is optional + workspace=None, # 
This parameter is optional ) ``` @@ -874,8 +871,8 @@ directlake.add_table_to_direct_lake_semantic_model( import sempy_labs as labs import sempy_labs.directlake as directlake directlake.check_fallback_reason( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -884,8 +881,8 @@ directlake.check_fallback_reason( import sempy_labs as labs import sempy_labs.directlake as directlake directlake.direct_lake_schema_compare( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -894,9 +891,9 @@ directlake.direct_lake_schema_compare( import sempy_labs as labs import sempy_labs.directlake as directlake directlake.direct_lake_schema_sync( - dataset='', - workspace=None, - add_to_model=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional + add_to_model=False, # This parameter is optional ) ``` @@ -905,13 +902,13 @@ directlake.direct_lake_schema_sync( import sempy_labs as labs import sempy_labs.directlake as directlake directlake.generate_direct_lake_semantic_model( - dataset='', + dataset='AdvWorks', lakehouse_tables='', - workspace=None, - lakehouse=None, - lakehouse_workspace=None, - overwrite=None, - refresh=None, + workspace=None, # This parameter is optional + lakehouse=None, # This parameter is optional + lakehouse_workspace=None, # This parameter is optional + overwrite=False, # This parameter is optional + refresh=True, # This parameter is optional ) ``` @@ -919,8 +916,7 @@ directlake.generate_direct_lake_semantic_model( ```python import sempy_labs as labs import sempy_labs.directlake as directlake -directlake.get_direct_lake_guardrails( -) +directlake.get_direct_lake_guardrails() ``` ### [get_direct_lake_lakehouse](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_direct_lake_lakehouse) @@ -928,10 +924,10 @@ directlake.get_direct_lake_guardrails( import sempy_labs as labs import 
sempy_labs.directlake as directlake directlake.get_direct_lake_lakehouse( - dataset='', - workspace=None, - lakehouse=None, - lakehouse_workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional + lakehouse=None, # This parameter is optional + lakehouse_workspace=None, # This parameter is optional ) ``` @@ -940,8 +936,8 @@ directlake.get_direct_lake_lakehouse( import sempy_labs as labs import sempy_labs.directlake as directlake directlake.get_direct_lake_source( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -959,8 +955,8 @@ directlake.get_directlake_guardrails_for_sku( import sempy_labs as labs import sempy_labs.directlake as directlake directlake.get_shared_expression( - lakehouse=None, - workspace=None, + lakehouse=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -969,7 +965,7 @@ directlake.get_shared_expression( import sempy_labs as labs import sempy_labs.directlake as directlake directlake.get_sku_size( - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -978,8 +974,8 @@ directlake.get_sku_size( import sempy_labs as labs import sempy_labs.directlake as directlake directlake.list_direct_lake_model_calc_tables( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -988,8 +984,8 @@ directlake.list_direct_lake_model_calc_tables( import sempy_labs as labs import sempy_labs.directlake as directlake directlake.show_unsupported_direct_lake_objects( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -998,10 +994,10 @@ directlake.show_unsupported_direct_lake_objects( import sempy_labs as labs import sempy_labs.directlake as directlake directlake.update_direct_lake_model_lakehouse_connection( - dataset='', - workspace=None, - lakehouse=None, - lakehouse_workspace=None, + dataset='AdvWorks', + workspace=None, 
# This parameter is optional + lakehouse=None, # This parameter is optional + lakehouse_workspace=None, # This parameter is optional ) ``` @@ -1010,10 +1006,10 @@ directlake.update_direct_lake_model_lakehouse_connection( import sempy_labs as labs import sempy_labs.directlake as directlake directlake.update_direct_lake_partition_entity( - dataset='', + dataset='AdvWorks', table_name='', entity_name='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -1022,8 +1018,8 @@ directlake.update_direct_lake_partition_entity( import sempy_labs as labs import sempy_labs.directlake as directlake directlake.warm_direct_lake_cache_isresident( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -1032,10 +1028,10 @@ directlake.warm_direct_lake_cache_isresident( import sempy_labs as labs import sempy_labs.directlake as directlake directlake.warm_direct_lake_cache_perspective( - dataset='', + dataset='AdvWorks', perspective='', - add_dependencies=None, - workspace=None, + add_dependencies=False, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -1048,8 +1044,8 @@ lake.create_shortcut_onelake( source_lakehouse='', source_workspace='', destination_lakehouse='', - destination_workspace=None, - shortcut_name=None, + destination_workspace=None, # This parameter is optional + shortcut_name=None, # This parameter is optional ) ``` @@ -1059,8 +1055,8 @@ import sempy_labs as labs import sempy_labs.lakehouse as lake lake.delete_shortcut( shortcut_name='', - lakehouse=None, - workspace=None, + lakehouse=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -1069,8 +1065,8 @@ lake.delete_shortcut( import sempy_labs as labs import sempy_labs.lakehouse as lake lake.get_lakehouse_columns( - lakehouse=None, - workspace=None, + lakehouse=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -1079,11 +1075,11 @@ 
lake.get_lakehouse_columns( import sempy_labs as labs import sempy_labs.lakehouse as lake lake.get_lakehouse_tables( - lakehouse=None, - workspace=None, - extended=None, - count_rows=None, - export=None, + lakehouse=None, # This parameter is optional + workspace=None, # This parameter is optional + extended=False, # This parameter is optional + count_rows=False, # This parameter is optional + export=False, # This parameter is optional ) ``` @@ -1091,8 +1087,7 @@ lake.get_lakehouse_tables( ```python import sempy_labs as labs import sempy_labs.lakehouse as lake -lake.lakehouse_attached( -) +lake.lakehouse_attached() ``` ### [optimize_lakehouse_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.optimize_lakehouse_tables) @@ -1100,9 +1095,9 @@ lake.lakehouse_attached( import sempy_labs as labs import sempy_labs.lakehouse as lake lake.optimize_lakehouse_tables( - tables=None, - lakehouse=None, - workspace=None, + tables=None, # This parameter is optional + lakehouse=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -1111,10 +1106,10 @@ lake.optimize_lakehouse_tables( import sempy_labs as labs import sempy_labs.lakehouse as lake lake.vacuum_lakehouse_tables( - tables=None, - lakehouse=None, - workspace=None, - retain_n_hours=None, + tables=None, # This parameter is optional + lakehouse=None, # This parameter is optional + workspace=None, # This parameter is optional + retain_n_hours=None, # This parameter is optional ) ``` @@ -1123,9 +1118,9 @@ lake.vacuum_lakehouse_tables( import sempy_labs as labs import sempy_labs.migration as migration migration.create_pqt_file( - dataset='', - workspace=None, - file_name=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional + file_name='PowerQueryTemplate', # This parameter is optional ) ``` @@ -1134,12 +1129,12 @@ migration.create_pqt_file( import sempy_labs as labs import sempy_labs.migration as migration 
migration.migrate_calc_tables_to_lakehouse( - dataset='', + dataset='AdvWorks', new_dataset='', - workspace=None, - new_dataset_workspace=None, - lakehouse=None, - lakehouse_workspace=None, + workspace=None, # This parameter is optional + new_dataset_workspace=None, # This parameter is optional + lakehouse=None, # This parameter is optional + lakehouse_workspace=None, # This parameter is optional ) ``` @@ -1148,12 +1143,12 @@ migration.migrate_calc_tables_to_lakehouse( import sempy_labs as labs import sempy_labs.migration as migration migration.migrate_calc_tables_to_semantic_model( - dataset='', + dataset='AdvWorks', new_dataset='', - workspace=None, - new_dataset_workspace=None, - lakehouse=None, - lakehouse_workspace=None, + workspace=None, # This parameter is optional + new_dataset_workspace=None, # This parameter is optional + lakehouse=None, # This parameter is optional + lakehouse_workspace=None, # This parameter is optional ) ``` @@ -1162,10 +1157,10 @@ migration.migrate_calc_tables_to_semantic_model( import sempy_labs as labs import sempy_labs.migration as migration migration.migrate_field_parameters( - dataset='', + dataset='AdvWorks', new_dataset='', - workspace=None, - new_dataset_workspace=None, + workspace=None, # This parameter is optional + new_dataset_workspace=None, # This parameter is optional ) ``` @@ -1174,10 +1169,10 @@ migration.migrate_field_parameters( import sempy_labs as labs import sempy_labs.migration as migration migration.migrate_model_objects_to_semantic_model( - dataset='', + dataset='AdvWorks', new_dataset='', - workspace=None, - new_dataset_workspace=None, + workspace=None, # This parameter is optional + new_dataset_workspace=None, # This parameter is optional ) ``` @@ -1186,12 +1181,12 @@ migration.migrate_model_objects_to_semantic_model( import sempy_labs as labs import sempy_labs.migration as migration migration.migrate_tables_columns_to_semantic_model( - dataset='', + dataset='AdvWorks', new_dataset='', - workspace=None, - 
new_dataset_workspace=None, - lakehouse=None, - lakehouse_workspace=None, + workspace=None, # This parameter is optional + new_dataset_workspace=None, # This parameter is optional + lakehouse=None, # This parameter is optional + lakehouse_workspace=None, # This parameter is optional ) ``` @@ -1200,10 +1195,10 @@ migration.migrate_tables_columns_to_semantic_model( import sempy_labs as labs import sempy_labs.migration as migration migration.migration_validation( - dataset='', + dataset='AdvWorks', new_dataset='', - workspace=None, - new_dataset_workspace=None, + workspace=None, # This parameter is optional + new_dataset_workspace=None, # This parameter is optional ) ``` @@ -1212,8 +1207,8 @@ migration.migration_validation( import sempy_labs as labs import sempy_labs.migration as migration migration.refresh_calc_tables( - dataset='', - workspace=None, + dataset='AdvWorks', + workspace=None, # This parameter is optional ) ``` @@ -1224,10 +1219,10 @@ import sempy_labs.report as rep rep.clone_report( report='', cloned_report='', - workspace=None, - target_workspace=None, - target_dataset=None, - target_dataset_workspace=None, + workspace=None, # This parameter is optional + target_workspace=None, # This parameter is optional + target_dataset=None, # This parameter is optional + target_dataset_workspace=None, # This parameter is optional ) ``` @@ -1236,9 +1231,9 @@ rep.clone_report( import sempy_labs as labs import sempy_labs.report as rep rep.create_model_bpa_report( - report=None, - dataset=None, - dataset_workspace=None, + report='ModelBPA', # This parameter is optional + dataset='ModelBPA', # This parameter is optional + dataset_workspace=None, # This parameter is optional ) ``` @@ -1248,10 +1243,10 @@ import sempy_labs as labs import sempy_labs.report as rep rep.create_report_from_reportjson( report='', - dataset='', + dataset='AdvWorks', report_json='', - theme_json=None, - workspace=None, + theme_json=None, # This parameter is optional + workspace=None, # This 
parameter is optional ) ``` @@ -1262,12 +1257,12 @@ import sempy_labs.report as rep rep.export_report( report='', export_format='', - file_name=None, - bookmark_name=None, - page_name=None, - visual_name=None, - report_filter=None, - workspace=None, + file_name=None, # This parameter is optional + bookmark_name=None, # This parameter is optional + page_name=None, # This parameter is optional + visual_name=None, # This parameter is optional + report_filter=None, # This parameter is optional + workspace=None, # This parameter is optional ) ``` @@ -1277,7 +1272,7 @@ import sempy_labs as labs import sempy_labs.report as rep rep.get_report_definition( report='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -1287,8 +1282,8 @@ import sempy_labs as labs import sempy_labs.report as rep rep.get_report_json( report='', - workspace=None, - save_to_file_name=None, + workspace=None, # This parameter is optional + save_to_file_name=None, # This parameter is optional ) ``` @@ -1298,7 +1293,7 @@ import sempy_labs as labs import sempy_labs.report as rep rep.launch_report( report='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -1308,9 +1303,9 @@ import sempy_labs as labs import sempy_labs.report as rep rep.report_rebind( report='', - dataset='', - report_workspace=None, - dataset_workspace=None, + dataset='AdvWorks', + report_workspace=None, # This parameter is optional + dataset_workspace=None, # This parameter is optional ) ``` @@ -1319,11 +1314,11 @@ rep.report_rebind( import sempy_labs as labs import sempy_labs.report as rep rep.report_rebind_all( - dataset='', + dataset='AdvWorks', new_dataset='', - dataset_workspace=None, - new_dataset_workpace=None, - report_workspace=None, + dataset_workspace=None, # This parameter is optional + new_dataset_workpace=None, # This parameter is optional + report_workspace=None, # This parameter is optional ) ``` @@ -1334,7 +1329,7 @@ import sempy_labs.report as rep 
rep.update_report_from_reportjson( report='', report_json='', - workspace=None, + workspace=None, # This parameter is optional ) ``` @@ -1348,13 +1343,13 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: column_name='', expression='', data_type='', - format_string=None, - hidden=None, - description=None, - display_folder=None, - data_category=None, - key=None, - summarize_by=None, + format_string=None, # This parameter is optional + hidden=False, # This parameter is optional + description=None, # This parameter is optional + display_folder=None, # This parameter is optional + data_category=None, # This parameter is optional + key=False, # This parameter is optional + summarize_by=None, # This parameter is optional ) ``` @@ -1366,9 +1361,9 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.add_calculated_table( name='', expression='', - description=None, - data_category=None, - hidden=None, + description=None, # This parameter is optional + data_category=None, # This parameter is optional + hidden=False, # This parameter is optional ) ``` @@ -1382,13 +1377,13 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: column_name='', source_column='', data_type='', - format_string=None, - hidden=None, - description=None, - display_folder=None, - data_category=None, - key=None, - summarize_by=None, + format_string=None, # This parameter is optional + hidden=False, # This parameter is optional + description=None, # This parameter is optional + display_folder=None, # This parameter is optional + data_category=None, # This parameter is optional + key=False, # This parameter is optional + summarize_by=None, # This parameter is optional ) ``` @@ -1400,8 +1395,8 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.add_calculation_group( name='', precedence='', - description=None, - hidden=None, + description=None, # This parameter is optional + hidden=False, # 
This parameter is optional ) ``` @@ -1414,9 +1409,9 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: table_name='', calculation_item_name='', expression='', - ordinal=None, - description=None, - format_string_expression=None, + ordinal=None, # This parameter is optional + description=None, # This parameter is optional + format_string_expression=None, # This parameter is optional ) ``` @@ -1430,13 +1425,13 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: column_name='', source_column='', data_type='', - format_string=None, - hidden=None, - description=None, - display_folder=None, - data_category=None, - key=None, - summarize_by=None, + format_string=None, # This parameter is optional + hidden=False, # This parameter is optional + description=None, # This parameter is optional + display_folder=None, # This parameter is optional + data_category=None, # This parameter is optional + key=False, # This parameter is optional + summarize_by=None, # This parameter is optional ) ``` @@ -1448,8 +1443,8 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.add_entity_partition( table_name='', entity_name='', - expression=None, - description=None, + expression=None, # This parameter is optional + description=None, # This parameter is optional ) ``` @@ -1461,7 +1456,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.add_expression( name='', expression='', - description=None, + description=None, # This parameter is optional ) ``` @@ -1473,7 +1468,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.add_field_parameter( table_name='', objects='', - object_names='', + object_names=None, # This parameter is optional ) ``` @@ -1486,9 +1481,9 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: table_name='', hierarchy_name='', columns='', - levels=None, - hierarchy_description=None, - 
hierarchy_hidden=None, + levels=None, # This parameter is optional + hierarchy_description=None, # This parameter is optional + hierarchy_hidden=False, # This parameter is optional ) ``` @@ -1506,8 +1501,8 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: incremental_periods='', rolling_window_granularity='', rolling_window_periods='', - only_refresh_complete_days=None, - detect_data_changes_column=None, + only_refresh_complete_days=False, # This parameter is optional + detect_data_changes_column=None, # This parameter is optional ) ``` @@ -1520,8 +1515,8 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: table_name='', partition_name='', expression='', - mode=None, - description=None, + mode=None, # This parameter is optional + description=None, # This parameter is optional ) ``` @@ -1534,11 +1529,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: table_name='', measure_name='', expression='', - format_string=None, - hidden=None, - description=None, - display_folder=None, - format_string_expression=None, + format_string=None, # This parameter is optional + hidden=False, # This parameter is optional + description=None, # This parameter is optional + display_folder=None, # This parameter is optional + format_string_expression=None, # This parameter is optional ) ``` @@ -1564,10 +1559,10 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: to_column='', from_cardinality='', to_cardinality='', - cross_filtering_behavior=None, - is_active=None, - security_filtering_behavior=None, - rely_on_referential_integrity=None, + cross_filtering_behavior=None, # This parameter is optional + is_active=True, # This parameter is optional + security_filtering_behavior=None, # This parameter is optional + rely_on_referential_integrity=False, # This parameter is optional ) ``` @@ -1578,8 +1573,8 @@ from sempy_labs.tom import connect_semantic_model with 
connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.add_role( role_name='', - model_permission=None, - description=None, + model_permission=None, # This parameter is optional + description=None, # This parameter is optional ) ``` @@ -1590,9 +1585,9 @@ from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.add_table( name='', - description=None, - data_category=None, - hidden=None, + description=None, # This parameter is optional + data_category=None, # This parameter is optional + hidden=False, # This parameter is optional ) ``` @@ -1614,7 +1609,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.add_to_perspective( - object='', + object=, perspective_name='', ) ``` @@ -1634,8 +1629,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_calculated_columns( -) + tom.all_calculated_columns() ``` ### [all_calculated_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_calculated_tables) @@ -1643,8 +1637,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_calculated_tables( -) + tom.all_calculated_tables() ``` ### [all_calculation_groups](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_calculation_groups) @@ -1652,8 +1645,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model 
with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_calculation_groups( -) + tom.all_calculation_groups() ``` ### [all_calculation_items](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_calculation_items) @@ -1661,8 +1653,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_calculation_items( -) + tom.all_calculation_items() ``` ### [all_columns](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_columns) @@ -1670,8 +1661,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_columns( -) + tom.all_columns() ``` ### [all_date_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_date_tables) @@ -1679,8 +1669,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_date_tables( -) + tom.all_date_tables() ``` ### [all_hierarchies](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_hierarchies) @@ -1688,8 +1677,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_hierarchies( -) + tom.all_hierarchies() ``` ### 
[all_hybrid_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_hybrid_tables) @@ -1697,8 +1685,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_hybrid_tables( -) + tom.all_hybrid_tables() ``` ### [all_levels](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_levels) @@ -1706,8 +1693,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_levels( -) + tom.all_levels() ``` ### [all_measures](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_measures) @@ -1715,8 +1701,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_measures( -) + tom.all_measures() ``` ### [all_partitions](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_partitions) @@ -1724,8 +1709,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_partitions( -) + tom.all_partitions() ``` ### [all_rls](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_rls) @@ -1733,8 +1717,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from 
sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_rls( -) + tom.all_rls() ``` ### [apply_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.apply_refresh_policy) @@ -1744,9 +1727,9 @@ from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.apply_refresh_policy( table_name='', - effective_date=None, - refresh=None, - max_parallelism=None, + effective_date=None, # This parameter is optional + refresh=True, # This parameter is optional + max_parallelism=0, # This parameter is optional ) ``` @@ -1756,7 +1739,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.cardinality( - column='', + column=tom.model.Tables["Geography"].Columns["GeographyKey"], ) ``` @@ -1766,7 +1749,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.clear_annotations( - object='', + object=, ) ``` @@ -1776,7 +1759,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.clear_extended_properties( - object='', + object=, ) ``` @@ -1786,7 +1769,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.data_size( - column='', + column=tom.model.Tables["Geography"].Columns["GeographyKey"], ) ``` @@ -1796,8 +1779,8 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.depends_on( - object='', - dependencies='', + object=, + 
dependencies=labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace), ) ``` @@ -1807,7 +1790,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.dictionary_size( - column='', + column=tom.model.Tables["Geography"].Columns["GeographyKey"], ) ``` @@ -1817,8 +1800,8 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.fully_qualified_measures( - object='', - dependencies='', + object=tom.model.Tables["Sales"].Measures["Sales Amount"], + dependencies=labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace), ) ``` @@ -1828,7 +1811,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.get_annotation_value( - object='', + object=, name='', ) ``` @@ -1839,7 +1822,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.get_annotations( - object='', + object=, ) ``` @@ -1849,7 +1832,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.get_extended_properties( - object='', + object=, ) ``` @@ -1859,7 +1842,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.get_extended_property_value( - object='', + object=, name='', ) ``` @@ -1869,8 +1852,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.has_aggs( -) 
+ tom.has_aggs() ``` ### [has_date_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_date_table) @@ -1878,8 +1860,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.has_date_table( -) + tom.has_date_table() ``` ### [has_hybrid_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_hybrid_table) @@ -1887,8 +1868,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.has_hybrid_table( -) + tom.has_hybrid_table() ``` ### [has_incremental_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_incremental_refresh_policy) @@ -1907,7 +1887,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.in_perspective( - object='', + object=, perspective_name='', ) ``` @@ -1957,8 +1937,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.is_direct_lake( -) + tom.is_direct_lake() ``` ### [is_direct_lake_using_view](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_direct_lake_using_view) @@ -1966,8 +1945,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', 
workspace='', readonly=True) as tom: - tom.is_direct_lake_using_view( -) + tom.is_direct_lake_using_view() ``` ### [is_field_parameter](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_field_parameter) @@ -2007,7 +1985,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.records_per_segment( - object='', + object=tom.model.Tables["Sales"].Partitions["Sales"], ) ``` @@ -2017,8 +1995,8 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.referenced_by( - object='', - dependencies='', + object=, + dependencies=labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace), ) ``` @@ -2039,7 +2017,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.remove_annotation( - object='', + object=, name='', ) ``` @@ -2050,7 +2028,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.remove_extended_property( - object='', + object=, name='', ) ``` @@ -2061,7 +2039,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.remove_from_perspective( - object='', + object=, perspective_name='', ) ``` @@ -2072,7 +2050,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.remove_object( - object='', + object=, ) ``` @@ -2093,7 +2071,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as 
tom: tom.remove_translation( - object='', + object=, language='', ) ``` @@ -2103,8 +2081,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.remove_vertipaq_annotations( -) + tom.remove_vertipaq_annotations() ``` ### [row_count](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.row_count) @@ -2113,7 +2090,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.row_count( - object='', + object=tom.model.Tables["Sales"], ) ``` @@ -2138,7 +2115,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: column_name='', summarization_type='', base_table='', - base_column=None, + base_column=None, # This parameter is optional ) ``` @@ -2148,7 +2125,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.set_annotation( - object='', + object=, name='', value='', ) @@ -2206,7 +2183,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.set_extended_property( - object='', + object=, extended_property_type='', name='', value='', @@ -2221,7 +2198,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.set_is_available_in_mdx( table_name='', column_name='', - value=None, + value=False, # This parameter is optional ) ``` @@ -2235,10 +2212,10 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: target='', lower_bound='', upper_bound='', - lower_mid_bound=None, - upper_mid_bound=None, - status_type=None, - status_graphic=None, + lower_mid_bound=None, # 
This parameter is optional + upper_mid_bound=None, # This parameter is optional + status_type=None, # This parameter is optional + status_graphic=None, # This parameter is optional ) ``` @@ -2287,7 +2264,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.set_summarize_by( table_name='', column_name='', - value=None, + value='Default', # This parameter is optional ) ``` @@ -2297,7 +2274,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.set_translation( - object='', + object=tom.model.Tables["Sales"], language='', property='', value='', @@ -2309,8 +2286,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_vertipaq_annotations( -) + tom.set_vertipaq_annotations() ``` ### [show_incremental_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.show_incremental_refresh_policy) @@ -2329,7 +2305,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.total_size( - object='', + object=tom.model.Tables["Sales"], ) ``` @@ -2339,8 +2315,8 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.unqualified_columns( - object='', - dependencies='', + object=, + dependencies=labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace), ) ``` @@ -2352,10 +2328,10 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.update_calculation_item( table_name='', calculation_item_name='', - expression=None, - ordinal=None, - description=None, 
- format_string_expression=None, + expression=None, # This parameter is optional + ordinal=None, # This parameter is optional + description=None, # This parameter is optional + format_string_expression=None, # This parameter is optional ) ``` @@ -2367,16 +2343,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.update_column( table_name='', column_name='', - source_column=None, - data_type=None, - expression=None, - format_string=None, - hidden=None, - description=None, - display_folder=None, - data_category=None, - key=None, - summarize_by=None, + source_column=None, # This parameter is optional + data_type=None, # This parameter is optional + expression=None, # This parameter is optional + format_string=None, # This parameter is optional + hidden=None, # This parameter is optional + description=None, # This parameter is optional + display_folder=None, # This parameter is optional + data_category=None, # This parameter is optional + key=None, # This parameter is optional + summarize_by=None, # This parameter is optional ) ``` @@ -2391,8 +2367,8 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: incremental_periods='', rolling_window_granularity='', rolling_window_periods='', - only_refresh_complete_days=None, - detect_data_changes_column=None, + only_refresh_complete_days=False, # This parameter is optional + detect_data_changes_column=None, # This parameter is optional ) ``` @@ -2404,9 +2380,9 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.update_m_partition( table_name='', partition_name='', - expression=None, - mode=None, - description=None, + expression=None, # This parameter is optional + mode=None, # This parameter is optional + description=None, # This parameter is optional ) ``` @@ -2417,12 +2393,12 @@ from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.update_measure( 
measure_name='', - expression=None, - format_string=None, - hidden=None, - description=None, - display_folder=None, - format_string_expression=None, + expression=None, # This parameter is optional + format_string=None, # This parameter is optional + hidden=None, # This parameter is optional + description=None, # This parameter is optional + display_folder=None, # This parameter is optional + format_string_expression=None, # This parameter is optional ) ``` @@ -2433,8 +2409,8 @@ from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.update_role( role_name='', - model_permission=None, - description=None, + model_permission=None, # This parameter is optional + description=None, # This parameter is optional ) ``` @@ -2444,8 +2420,8 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.used_in_calc_item( - object='', - dependencies='', + object=, + dependencies=labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace), ) ``` @@ -2455,8 +2431,8 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.used_in_data_coverage_definition( - object='', - dependencies='', + object=, + dependencies=labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace), ) ``` @@ -2466,7 +2442,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.used_in_hierarchies( - column='', + column=tom.model.Tables["Geography"].Columns["GeographyKey"], ) ``` @@ -2476,7 +2452,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.used_in_levels( - column='', + 
column=tom.model.Tables["Geography"].Columns["GeographyKey"], ) ``` @@ -2486,7 +2462,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.used_in_relationships( - object='', + object=tom.model.Tables["Sales"], ) ``` @@ -2496,8 +2472,8 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.used_in_rls( - object='', - dependencies='', + object=tom.model.Tables["Sales"], + dependencies=labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace), ) ``` @@ -2507,7 +2483,7 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.used_in_sort_by( - column='', + column=tom.model.Tables["Geography"].Columns["GeographyKey"], ) ``` @@ -2517,6 +2493,6 @@ import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.used_size( - object='', + object=tom.model.Tables["Geography"].Hierarchies["Geo Hierarchy"], ) ``` diff --git a/notebooks/Tabular Object Model.ipynb b/notebooks/Tabular Object Model.ipynb index b95949d1..041cd92c 100644 --- a/notebooks/Tabular Object Model.ipynb +++ b/notebooks/Tabular Object Model.ipynb @@ -1 +1 @@ -{"cells":[{"cell_type":"markdown","id":"5c27dfd1-4fe0-4a97-92e6-ddf78889aa93","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Install the latest .whl package\n","\n","Check [here](https://pypi.org/project/semantic-link-labs/) to see the latest version."]},{"cell_type":"code","execution_count":null,"id":"d5cae9db-cef9-48a8-a351-9c5fcc99645c","metadata":{"jupyter":{"outputs_hidden":true,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["%pip install 
semantic-link-labs"]},{"cell_type":"markdown","id":"5a3fe6e8-b8aa-4447-812b-7931831e07fe","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Connect to the [Tabular Object Model](https://learn.microsoft.com/analysis-services/tom/introduction-to-the-tabular-object-model-tom-in-analysis-services-amo?view=asallproducts-allversions) ([TOM](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.model?view=analysisservices-dotnet))\n","Setting the 'readonly' property to False enables read/write mode. This allows changes to be made to the semantic model."]},{"cell_type":"code","execution_count":null,"id":"cde43b47-4ecc-46ae-9125-9674819c7eab","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["import sempy_labs as labs\n","from sempy_labs.tom import connect_semantic_model\n","\n","dataset = '' # Enter dataset name\n","workspace = None # Enter workspace name\n","\n","with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," print(t.Name)"]},{"cell_type":"markdown","id":"fc6b277e","metadata":{},"source":["### Make changes to a semantic model using custom functions\n","Note that the custom functions have additional optional parameters (which may not be used in the examples below) for adding properties to model objects. 
Check the [documentation](https://semantic-link-labs.readthedocs.io/en/0.5.0/sempy_labs.tom.html) to see all available parameters for each function."]},{"cell_type":"markdown","id":"6d46d878","metadata":{},"source":["#### Rename objects in the semantic model"]},{"cell_type":"code","execution_count":null,"id":"1284825a","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," t.Name = t.Name.replace('_',' ')\n"]},{"cell_type":"code","execution_count":null,"id":"d3b60303","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," c.Name = c.Name.replace('_',' ')"]},{"cell_type":"markdown","id":"402a477c","metadata":{},"source":["#### Add measure(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"bdaaaa5c","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_measure(table_name ='Internet Sales', measure_name = 'Sales Amount', expression = \"SUM('Internet Sales'[SalesAmount])\")\n"," tom.add_measure(table_name ='Internet Sales', measure_name = 'Order Quantity', expression = \"SUM('Internet Sales'[OrderQty])\") "]},{"cell_type":"code","execution_count":null,"id":"a53a544b","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Internet Sales':\n"," tom.add_measure(table_name = t.Name, measure_name = 'Sales Amount', expression = \"SUM('Internet Sales'[SalesAmount])\")\n"," tom.add_measure(table_name = t.Name, measure_name = 'Order Quantity', expression = \"SUM('Internet Sales'[OrderQty])\")"]},{"cell_type":"markdown","id":"1cb1632f","metadata":{},"source":["#### Add column(s) to the semantic 
model"]},{"cell_type":"code","execution_count":null,"id":"81a22749","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_data_column(table_name ='Product', column_name = 'Size Range', source_column = 'SizeRange', data_type = 'Int64')\n"," tom.add_data_column(table_name = 'Segment', column_name = 'Summary Segment', source_column = 'SummarySegment', data_type = 'String')\n","\n"," tom.add_calculated_column(table_name = 'Internet Sales', column_name = 'GrossMargin', expression = \"'Internet Sales'[SalesAmount] - 'Internet Sales'[ProductCost]\", data_type = 'Decimal')"]},{"cell_type":"code","execution_count":null,"id":"053b6516","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.add_data_column(table_name = t.Name, column_name = 'Size Range', source_column = 'SizeRange', data_type = 'Int64')\n"," elif t.Name == 'Segment':\n"," tom.add_data_column(table_name = t.Name, column_name = 'Summary Segment', source_column = 'SummarySegment', data_type = 'String')\n"," elif t.Name == 'Internet Sales':\n"," tom.add_calculated_column(table_name = t.Name, column_name = 'GrossMargin', expression = \"'Internet Sales'[SalesAmount] - 'Internet Sales'[ProductCost]\", data_type = 'Decimal')"]},{"cell_type":"markdown","id":"f53dcca7","metadata":{},"source":["#### Add hierarchies to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"a9309e23","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_hierarchy(table_name = 'Geography', hierarchy_name = 'Geo Hierarchy', levels = ['Continent', 'Country', 'State', 'City'])"]},{"cell_type":"code","execution_count":null,"id":"a04281ce","metadata":{},"outputs":[],"source":["with 
connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Geography':\n"," tom.add_hierarchy(table_name = t.Name, hierarchy_name = 'Geo Hierarchy', levels = ['Continent', 'Country', 'State', 'City'])"]},{"cell_type":"markdown","id":"47c06a4f","metadata":{},"source":["#### Add relationship(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"e8cd7bbf","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_relationship(\n"," from_table = 'Internet Sales', from_column = 'ProductKey',\n"," to_table = 'Product', to_column = 'ProductKey', \n"," from_cardinality = 'Many', to_cardinality = 'One')"]},{"cell_type":"markdown","id":"3cc7f11e","metadata":{},"source":["#### Add a table with an M partition to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"0f5dd66a","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_table(name = table_name)\n"," tom.add_m_partition(table_name = table_name, partition_name = table_name, expression = 'let....')"]},{"cell_type":"markdown","id":"ea389123","metadata":{},"source":["#### Add a table with an entity partition to a Direct Lake semantic model "]},{"cell_type":"code","execution_count":null,"id":"f75387d1","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_table(name = table_name)\n"," tom.add_entity_partition(table_name = table_name, entity_name = table_name)"]},{"cell_type":"markdown","id":"e74d0f54","metadata":{},"source":["#### Add a calculated table (and columns) to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"934f7315","metadata":{},"outputs":[],"source":["with 
connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_calculated_table(name = table_name, expression = \"DISTINCT('Product'[Color])\")\n"," tom.add_calculated_table_column(table_name = table_name, column_name = 'Color', source_column = \"'Product[Color]\", data_type = 'String')"]},{"cell_type":"markdown","id":"0e7088b7","metadata":{},"source":["#### Add role(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"ad60ebb9","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_role(role_name = 'Reader')"]},{"cell_type":"markdown","id":"c541f81a","metadata":{},"source":["#### Set row level security (RLS) to the semantic model\n","This adds row level security (or updates it if it already exists)"]},{"cell_type":"code","execution_count":null,"id":"98603a08","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_rls(role_name ='Reader', table_name = 'Product', filter_expression = \"'Dim Product'[Color] = \\\"Blue\\\"\")"]},{"cell_type":"code","execution_count":null,"id":"effea009","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for r in tom.model.Roles:\n"," if r.Name == 'Reader':\n"," tom.set_rls(role_name = r.Name, table_name = 'Product', filter_expression = \"'Dim Product'[Color] = \\\"Blue\\\"\")"]},{"cell_type":"markdown","id":"7fa7a03c","metadata":{},"source":["#### Set object level security (OLS) to the semantic model\n","This adds row level security (or updates it if it already exists)"]},{"cell_type":"code","execution_count":null,"id":"dd0def9d","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_ols(role_name = 'Reader', 
table_name = 'Product', column_name = 'Size', permission = 'None')"]},{"cell_type":"code","execution_count":null,"id":"7a389dc7","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for r in tom.model.Roles:\n"," if r.Name == 'Reader':\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.set_ols(role_name = r.Name, table_name = t.Name, column_name = 'Size', permission = 'None')"]},{"cell_type":"markdown","id":"d0f7ccd1","metadata":{},"source":["#### Add calculation groups and calculation items to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"97f4708b","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_calculation_group(name = 'MyCalcGroup')"]},{"cell_type":"code","execution_count":null,"id":"fef68832","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_calculation_item(table_name = 'MyCalcGroup', calculation_item_name = 'YTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESYTD('Calendar'[CalendarDate]))\")\n"," tom.add_calculation_item(table_name = 'MyCalcGroup', calculation_item_name = 'MTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESMTD('Calendar'[CalendarDate]))\")"]},{"cell_type":"code","execution_count":null,"id":"c7653dcc","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'MyCalcGroup':\n"," tom.add_calculation_item(table_name = t.Name, calculation_item_name = 'YTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESYTD('Calendar'[CalendarDate]))\")\n"," tom.add_calculation_item(table_name = t.Name, calculation_item_name = 'MTD', expression = \"CALCULATE(SELECTEDMEASURE(), 
DATESMTD('Calendar'[CalendarDate]))\")"]},{"cell_type":"markdown","id":"c6450c74","metadata":{},"source":["#### Add translations to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"2b616b90","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_translation(language = 'it-IT')"]},{"cell_type":"code","execution_count":null,"id":"dc24c200","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_translation(object = tom.model.Tables['Product'], language = 'it-IT', property = 'Name', value = 'Produtto')"]},{"cell_type":"markdown","id":"3048cc95","metadata":{},"source":["#### Add a [Field Parameter](https://learn.microsoft.com/power-bi/create-reports/power-bi-field-parameters) to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"0a94af94","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_field_parameter(table_name = 'Parameter', objects = \"'Product'[Color], [Sales Amount], 'Geography'[Country]\")"]},{"cell_type":"markdown","id":"95aac09a","metadata":{},"source":["#### Remove an object(s) from a semantic model"]},{"cell_type":"code","execution_count":null,"id":"1e2572a8","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.remove_object(object = t.Columns['Size'])\n"," tom.remove_object(object = t.Hierarchies['Product Hierarchy'])"]},{"cell_type":"code","execution_count":null,"id":"bc453177","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.remove_object(object = tom.model.Tables['Product'].Columns['Size'])\n"," tom.remove_object(object = 
tom.model.Tables['Product'].Hierarchies['Product Hierarchy'])"]},{"cell_type":"markdown","id":"e0d0cb9e","metadata":{},"source":["### Custom functions to loop through non-top-level objects in a semantic model"]},{"cell_type":"code","execution_count":null,"id":"cbe3b1a3","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," print(c.Name)"]},{"cell_type":"code","execution_count":null,"id":"3f643e66","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for m in tom.all_measures():\n"," print(m.Name)"]},{"cell_type":"code","execution_count":null,"id":"ed1cde0f","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for p in tom.all_partitions():\n"," print(p.Name)"]},{"cell_type":"code","execution_count":null,"id":"f48014ae","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for h in tom.all_hierarchies():\n"," print(h.Name)"]},{"cell_type":"code","execution_count":null,"id":"9f5e7b72","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for ci in tom.all_calculation_items():\n"," print(ci.Name)"]},{"cell_type":"code","execution_count":null,"id":"3cd9ebc1","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for l in tom.all_levels():\n"," print(l.Name)"]},{"cell_type":"code","execution_count":null,"id":"12c58bad","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for rls in tom.all_rls():\n"," print(rls.Name)"]},{"cell_type":"markdown","id":"1a294bd2","metadata":{},"source":["### See 
Vertipaq Analyzer stats"]},{"cell_type":"code","execution_count":null,"id":"469660e9","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_vertipaq_annotations()\n","\n"," for t in tom.model.Tables:\n"," rc = tom.row_count(object = t)\n"," print(t.Name + ' : ' + str(rc))\n"," for c in t.Columns:\n"," col_size = tom.total_size(column = c)\n"," print(labs.format_dax_object_name(t.Name, c.Name) + ' : ' + str(col_size))"]},{"cell_type":"markdown","id":"1ab26dfd","metadata":{},"source":["### 'UsedIn' functions"]},{"cell_type":"code","execution_count":null,"id":"412bf287","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," full_name = labs.format_dax_object_name(c.Parent.Name, c.Name)\n"," for h in tom.used_in_hierarchies(column = c):\n"," print(full_name + ' : ' + h.Name)"]},{"cell_type":"code","execution_count":null,"id":"76556900","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," full_name = labs.format_dax_object_name(c.Parent.Name, c.Name)\n"," for r in tom.used_in_relationships(object = c):\n"," rel_name = labs.create_relationship_name(r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name)\n"," print(full_name + ' : ' + rel_name)"]},{"cell_type":"code","execution_count":null,"id":"4d9ec24e","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," for r in tom.used_in_relationships(object = t):\n"," rel_name = labs.create_relationship_name(r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name)\n"," print(t.Name + ' : ' + 
rel_name)"]},{"cell_type":"code","execution_count":null,"id":"82251336","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," dep = labs.get_model_calc_dependencies(dataset = dataset, workspace=workspace)\n"," for o in tom.used_in_rls(object = tom.model.Tables['Product'].Columns['Color'], dependencies=dep):\n"," print(o.Name)"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Synapse PySpark","language":"Python","name":"synapse_pyspark"},"language_info":{"name":"python"},"microsoft":{"language":"python"},"nteract":{"version":"nteract-front-end@1.0.0"},"spark_compute":{"compute_id":"/trident/default"},"synapse_widget":{"state":{},"version":"0.1"},"widgets":{}},"nbformat":4,"nbformat_minor":5} +{"cells":[{"cell_type":"markdown","id":"5c27dfd1-4fe0-4a97-92e6-ddf78889aa93","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Install the latest .whl package\n","\n","Check [here](https://pypi.org/project/semantic-link-labs/) to see the latest version."]},{"cell_type":"code","execution_count":null,"id":"d5cae9db-cef9-48a8-a351-9c5fcc99645c","metadata":{"jupyter":{"outputs_hidden":true,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["%pip install semantic-link-labs"]},{"cell_type":"markdown","id":"5a3fe6e8-b8aa-4447-812b-7931831e07fe","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Connect to the [Tabular Object Model](https://learn.microsoft.com/analysis-services/tom/introduction-to-the-tabular-object-model-tom-in-analysis-services-amo?view=asallproducts-allversions) ([TOM](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.model?view=analysisservices-dotnet))\n","Setting the 'readonly' property to False enables read/write mode. 
This allows changes to be made to the semantic model."]},{"cell_type":"code","execution_count":null,"id":"cde43b47-4ecc-46ae-9125-9674819c7eab","metadata":{"jupyter":{"outputs_hidden":false,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["import sempy_labs as labs\n","from sempy_labs.tom import connect_semantic_model\n","\n","dataset = '' # Enter dataset name\n","workspace = None # Enter workspace name\n","\n","with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," print(t.Name)"]},{"cell_type":"markdown","id":"fc6b277e","metadata":{},"source":["### Make changes to a semantic model using custom functions\n","Note that the custom functions have additional optional parameters (which may not be used in the examples below) for adding properties to model objects. Check the [documentation](https://semantic-link-labs.readthedocs.io/en/0.5.0/sempy_labs.tom.html) to see all available parameters for each function."]},{"cell_type":"markdown","id":"6d46d878","metadata":{},"source":["#### Rename objects in the semantic model"]},{"cell_type":"code","execution_count":null,"id":"1284825a","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," t.Name = t.Name.replace('_',' ')"]},{"cell_type":"code","execution_count":null,"id":"d3b60303","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," c.Name = c.Name.replace('_',' ')"]},{"cell_type":"markdown","id":"402a477c","metadata":{},"source":["#### Add measure(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"bdaaaa5c","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_measure(table_name 
='Internet Sales', measure_name = 'Sales Amount', expression = \"SUM('Internet Sales'[SalesAmount])\")\n"," tom.add_measure(table_name ='Internet Sales', measure_name = 'Order Quantity', expression = \"SUM('Internet Sales'[OrderQty])\") "]},{"cell_type":"code","execution_count":null,"id":"a53a544b","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Internet Sales':\n"," tom.add_measure(table_name = t.Name, measure_name = 'Sales Amount', expression = \"SUM('Internet Sales'[SalesAmount])\")\n"," tom.add_measure(table_name = t.Name, measure_name = 'Order Quantity', expression = \"SUM('Internet Sales'[OrderQty])\")"]},{"cell_type":"markdown","id":"1cb1632f","metadata":{},"source":["#### Add column(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"81a22749","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_data_column(table_name ='Product', column_name = 'Size Range', source_column = 'SizeRange', data_type = 'Int64')\n"," tom.add_data_column(table_name = 'Segment', column_name = 'Summary Segment', source_column = 'SummarySegment', data_type = 'String')\n","\n"," tom.add_calculated_column(table_name = 'Internet Sales', column_name = 'GrossMargin', expression = \"'Internet Sales'[SalesAmount] - 'Internet Sales'[ProductCost]\", data_type = 'Decimal')"]},{"cell_type":"code","execution_count":null,"id":"053b6516","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.add_data_column(table_name = t.Name, column_name = 'Size Range', source_column = 'SizeRange', data_type = 'Int64')\n"," elif t.Name == 'Segment':\n"," tom.add_data_column(table_name = t.Name, column_name = 'Summary Segment', 
source_column = 'SummarySegment', data_type = 'String')\n"," elif t.Name == 'Internet Sales':\n"," tom.add_calculated_column(table_name = t.Name, column_name = 'GrossMargin', expression = \"'Internet Sales'[SalesAmount] - 'Internet Sales'[ProductCost]\", data_type = 'Decimal')"]},{"cell_type":"markdown","id":"f53dcca7","metadata":{},"source":["#### Add hierarchies to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"a9309e23","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_hierarchy(table_name = 'Geography', hierarchy_name = 'Geo Hierarchy', levels = ['Continent', 'Country', 'State', 'City'])"]},{"cell_type":"code","execution_count":null,"id":"a04281ce","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Geography':\n"," tom.add_hierarchy(table_name = t.Name, hierarchy_name = 'Geo Hierarchy', levels = ['Continent', 'Country', 'State', 'City'])"]},{"cell_type":"markdown","id":"47c06a4f","metadata":{},"source":["#### Add relationship(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"e8cd7bbf","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_relationship(\n"," from_table = 'Internet Sales', from_column = 'ProductKey',\n"," to_table = 'Product', to_column = 'ProductKey', \n"," from_cardinality = 'Many', to_cardinality = 'One')"]},{"cell_type":"markdown","id":"3cc7f11e","metadata":{},"source":["#### Add a table with an M partition to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"0f5dd66a","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_table(name = table_name)\n"," 
tom.add_m_partition(table_name = table_name, partition_name = table_name, expression = 'let....')"]},{"cell_type":"markdown","id":"ea389123","metadata":{},"source":["#### Add a table with an entity partition to a Direct Lake semantic model "]},{"cell_type":"code","execution_count":null,"id":"f75387d1","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_table(name = table_name)\n"," tom.add_entity_partition(table_name = table_name, entity_name = table_name)"]},{"cell_type":"markdown","id":"e74d0f54","metadata":{},"source":["#### Add a calculated table (and columns) to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"934f7315","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," table_name = 'Sales'\n"," tom.add_calculated_table(name = table_name, expression = \"DISTINCT('Product'[Color])\")\n"," tom.add_calculated_table_column(table_name = table_name, column_name = 'Color', source_column = \"'Product[Color]\", data_type = 'String')"]},{"cell_type":"markdown","id":"0e7088b7","metadata":{},"source":["#### Add role(s) to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"ad60ebb9","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_role(role_name = 'Reader')"]},{"cell_type":"markdown","id":"c541f81a","metadata":{},"source":["#### Set row level security (RLS) to the semantic model\n","This adds row level security (or updates it if it already exists)"]},{"cell_type":"code","execution_count":null,"id":"98603a08","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_rls(role_name ='Reader', table_name = 'Product', filter_expression = \"'Dim Product'[Color] = 
\\\"Blue\\\"\")"]},{"cell_type":"code","execution_count":null,"id":"effea009","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for r in tom.model.Roles:\n"," if r.Name == 'Reader':\n"," tom.set_rls(role_name = r.Name, table_name = 'Product', filter_expression = \"'Dim Product'[Color] = \\\"Blue\\\"\")"]},{"cell_type":"markdown","id":"7fa7a03c","metadata":{},"source":["#### Set object level security (OLS) to the semantic model\n","This adds row level security (or updates it if it already exists)"]},{"cell_type":"code","execution_count":null,"id":"dd0def9d","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_ols(role_name = 'Reader', table_name = 'Product', column_name = 'Size', permission = 'None')"]},{"cell_type":"code","execution_count":null,"id":"7a389dc7","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for r in tom.model.Roles:\n"," if r.Name == 'Reader':\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.set_ols(role_name = r.Name, table_name = t.Name, column_name = 'Size', permission = 'None')"]},{"cell_type":"markdown","id":"d0f7ccd1","metadata":{},"source":["#### Add calculation groups and calculation items to the semantic model"]},{"cell_type":"code","execution_count":null,"id":"97f4708b","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_calculation_group(name = 'MyCalcGroup')"]},{"cell_type":"code","execution_count":null,"id":"fef68832","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_calculation_item(table_name = 'MyCalcGroup', calculation_item_name = 'YTD', expression = \"CALCULATE(SELECTEDMEASURE(), 
DATESYTD('Calendar'[CalendarDate]))\")\n"," tom.add_calculation_item(table_name = 'MyCalcGroup', calculation_item_name = 'MTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESMTD('Calendar'[CalendarDate]))\")"]},{"cell_type":"code","execution_count":null,"id":"c7653dcc","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'MyCalcGroup':\n"," tom.add_calculation_item(table_name = t.Name, calculation_item_name = 'YTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESYTD('Calendar'[CalendarDate]))\")\n"," tom.add_calculation_item(table_name = t.Name, calculation_item_name = 'MTD', expression = \"CALCULATE(SELECTEDMEASURE(), DATESMTD('Calendar'[CalendarDate]))\")"]},{"cell_type":"markdown","id":"c6450c74","metadata":{},"source":["#### Add translations to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"2b616b90","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_translation(language = 'it-IT')"]},{"cell_type":"code","execution_count":null,"id":"dc24c200","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_translation(object = tom.model.Tables['Product'], language = 'it-IT', property = 'Name', value = 'Produtto')"]},{"cell_type":"markdown","id":"3048cc95","metadata":{},"source":["#### Add a [Field Parameter](https://learn.microsoft.com/power-bi/create-reports/power-bi-field-parameters) to a semantic model"]},{"cell_type":"code","execution_count":null,"id":"0a94af94","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.add_field_parameter(table_name = 'Parameter', objects = \"'Product'[Color], [Sales Amount], 
'Geography'[Country]\")"]},{"cell_type":"markdown","id":"95aac09a","metadata":{},"source":["#### Remove an object(s) from a semantic model"]},{"cell_type":"code","execution_count":null,"id":"1e2572a8","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," if t.Name == 'Product':\n"," tom.remove_object(object = t.Columns['Size'])\n"," tom.remove_object(object = t.Hierarchies['Product Hierarchy'])"]},{"cell_type":"code","execution_count":null,"id":"bc453177","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.remove_object(object = tom.model.Tables['Product'].Columns['Size'])\n"," tom.remove_object(object = tom.model.Tables['Product'].Hierarchies['Product Hierarchy'])"]},{"cell_type":"markdown","id":"e0d0cb9e","metadata":{},"source":["### Custom functions to loop through non-top-level objects in a semantic model"]},{"cell_type":"code","execution_count":null,"id":"cbe3b1a3","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," print(c.Name)"]},{"cell_type":"code","execution_count":null,"id":"3f643e66","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for m in tom.all_measures():\n"," print(m.Name)"]},{"cell_type":"code","execution_count":null,"id":"ed1cde0f","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for p in tom.all_partitions():\n"," print(p.Name)"]},{"cell_type":"code","execution_count":null,"id":"f48014ae","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for h in tom.all_hierarchies():\n"," 
print(h.Name)"]},{"cell_type":"code","execution_count":null,"id":"9f5e7b72","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for ci in tom.all_calculation_items():\n"," print(ci.Name)"]},{"cell_type":"code","execution_count":null,"id":"3cd9ebc1","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for l in tom.all_levels():\n"," print(l.Name)"]},{"cell_type":"code","execution_count":null,"id":"12c58bad","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," for rls in tom.all_rls():\n"," print(rls.Name)"]},{"cell_type":"markdown","id":"1a294bd2","metadata":{},"source":["### See Vertipaq Analyzer stats"]},{"cell_type":"code","execution_count":null,"id":"469660e9","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=False, workspace=workspace) as tom:\n"," tom.set_vertipaq_annotations()\n","\n"," for t in tom.model.Tables:\n"," rc = tom.row_count(object = t)\n"," print(t.Name + ' : ' + str(rc))\n"," for c in t.Columns:\n"," col_size = tom.total_size(column = c)\n"," print(labs.format_dax_object_name(t.Name, c.Name) + ' : ' + str(col_size))"]},{"cell_type":"markdown","id":"1ab26dfd","metadata":{},"source":["### 'UsedIn' functions"]},{"cell_type":"code","execution_count":null,"id":"412bf287","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for c in tom.all_columns():\n"," full_name = labs.format_dax_object_name(c.Parent.Name, c.Name)\n"," for h in tom.used_in_hierarchies(column = c):\n"," print(full_name + ' : ' + h.Name)"]},{"cell_type":"code","execution_count":null,"id":"76556900","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," 
for c in tom.all_columns():\n"," full_name = labs.format_dax_object_name(c.Parent.Name, c.Name)\n"," for r in tom.used_in_relationships(object = c):\n"," rel_name = labs.create_relationship_name(r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name)\n"," print(full_name + ' : ' + rel_name)"]},{"cell_type":"code","execution_count":null,"id":"4d9ec24e","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," for t in tom.model.Tables:\n"," for r in tom.used_in_relationships(object = t):\n"," rel_name = labs.create_relationship_name(r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name)\n"," print(t.Name + ' : ' + rel_name)"]},{"cell_type":"code","execution_count":null,"id":"82251336","metadata":{},"outputs":[],"source":["with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:\n"," dep = labs.get_model_calc_dependencies(dataset = dataset, workspace=workspace)\n"," for o in tom.used_in_rls(object = tom.model.Tables['Product'].Columns['Color'], dependencies=dep):\n"," print(o.Name)"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Synapse PySpark","language":"Python","name":"synapse_pyspark"},"language_info":{"name":"python"},"microsoft":{"language":"python"},"nteract":{"version":"nteract-front-end@1.0.0"},"spark_compute":{"compute_id":"/trident/default"},"synapse_widget":{"state":{},"version":"0.1"},"widgets":{}},"nbformat":4,"nbformat_minor":5} diff --git a/src/sempy_labs/_generate_semantic_model.py b/src/sempy_labs/_generate_semantic_model.py index fd1bfaf4..e1e755f4 100644 --- a/src/sempy_labs/_generate_semantic_model.py +++ b/src/sempy_labs/_generate_semantic_model.py @@ -17,7 +17,7 @@ def create_blank_semantic_model( dataset: str, - compatibility_level: int = 1605, + compatibility_level: Optional[int] = 1605, workspace: Optional[str] = None, ): """ diff --git a/src/sempy_labs/tom/_model.py 
b/src/sempy_labs/tom/_model.py index 3fdb959a..6468a670 100644 --- a/src/sempy_labs/tom/_model.py +++ b/src/sempy_labs/tom/_model.py @@ -2409,7 +2409,7 @@ def set_is_available_in_mdx( self.model.Tables[table_name].Columns[column_name].IsAvailableInMDX = value def set_summarize_by( - self, table_name: str, column_name: str, value: Optional[str] = None + self, table_name: str, column_name: str, value: Optional[str] = 'Default' ): """ Sets the `SummarizeBy `_ property on a column. @@ -2440,8 +2440,6 @@ def set_summarize_by( ] # https://learn.microsoft.com/en-us/dotnet/api/microsoft.analysisservices.tabular.column.summarizeby?view=analysisservices-dotnet#microsoft-analysisservices-tabular-column-summarizeby - if value is None: - value = "Default" value = ( value.capitalize() .replace("Distinctcount", "DistinctCount") From 34ce30a790e82df1fa7f1f4614118984c57ac334 Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 5 Sep 2024 18:00:48 +0300 Subject: [PATCH 04/18] updated with parameters, returns --- .github/workflows/_generate_func_examples.py | 29 +- function_examples.md | 3920 ++++++++++++++++++ 2 files changed, 3947 insertions(+), 2 deletions(-) diff --git a/.github/workflows/_generate_func_examples.py b/.github/workflows/_generate_func_examples.py index 9901a048..a7882fbe 100644 --- a/.github/workflows/_generate_func_examples.py +++ b/.github/workflows/_generate_func_examples.py @@ -1,6 +1,7 @@ import inspect import os -import typing +import re +from docstring_parser import parse import sempy_labs import sempy_labs.migration import sempy_labs.report @@ -20,6 +21,7 @@ link_prefix = "https://semantic-link-labs.readthedocs.io/en/stable/" tab = ' ' skip_functions = ['connect_semantic_model', '__init__', 'close'] +pattern = r'`([A-Za-z ]+) <(https?://[^\s]+)>`_' markdown_example = '## Function Examples\n' # Function Examples @@ -29,11 +31,16 @@ attr = getattr(d, attr_name) if inspect.isfunction(attr): if attr_name not in skip_functions: + docstring = parse(attr.__doc__) 
link = f"{link_prefix}{d_name}.html#{d_name}.{attr_name}" if d_alias == 'tom': link = f"{link_prefix}sempy_labs.{d_alias}.html#sempy_labs.{d_alias}.{d_name}.{attr_name}" sig = inspect.signature(attr) - markdown_example += f"\n### [{attr_name}]({link})\n```python" + markdown_example += f"\n### [{attr_name}]({link})" + attr_description = docstring.description + attr_description = re.sub(pattern, r'[\1](\2)', str(attr_description)) + markdown_example += f"\n#### {attr_description}" + markdown_example += "\n```python" markdown_example += "\nimport sempy_labs as labs" if d_alias == 'tom': markdown_example += "\nfrom sempy_labs.tom import connect_semantic_model" @@ -92,6 +99,24 @@ else: markdown_example += f"\n{closing}" + if docstring.params: + markdown_example += "\n### Parameters" + for p in docstring.params: + p_description = re.sub(pattern, r'[\1](\2)', str(p.description)) + for param_name, param in sig.parameters.items(): + if param_name == p.arg_name: + if param.default != inspect.Parameter.empty: + req = 'Optional' + else: + req = 'Required' + param_value = param.default + + markdown_example += f"\n> **{p.arg_name}** ({p.type_name})\n>\n>> {req}; {p_description}\n>" + if docstring.returns: + ret = docstring.returns + markdown_example += '\n### Returns' + markdown_example += f"\n> {ret.type_name}; {ret.description}" + output_path = os.path.join('/root/semantic-link-labs', 'function_examples.md') with open(output_path, 'w') as f: f.write(markdown_example) diff --git a/function_examples.md b/function_examples.md index 08fde774..f8ef0ac2 100644 --- a/function_examples.md +++ b/function_examples.md @@ -1,6 +1,8 @@ ## Function Examples ### [add_user_to_workspace](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.add_user_to_workspace) +#### Adds a user to a workspace. 
+ ```python import sempy_labs as labs labs.add_user_to_workspace( @@ -11,7 +13,28 @@ labs.add_user_to_workspace( ) ``` +### Parameters +> **email_address** (str) +> +>> Required; The email address of the user. +> +> **role_name** (str) +> +>> Required; The [role](https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#groupuseraccessright) of the user within the workspace. +> +> **principal_type** (str, default='User') +> +>> Optional; The [principal type](https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#principaltype). +> +> **workspace** (str, default=None) +> +>> Optional; The name of the workspace. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [assign_workspace_to_capacity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.assign_workspace_to_capacity) +#### Assigns a workspace to a capacity. + ```python import sempy_labs as labs labs.assign_workspace_to_capacity( @@ -20,7 +43,20 @@ labs.assign_workspace_to_capacity( ) ``` +### Parameters +> **capacity_name** (str) +> +>> Required; The name of the capacity. +> +> **workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [assign_workspace_to_dataflow_storage](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.assign_workspace_to_dataflow_storage) +#### Assigns a dataflow storage account to a workspace. + ```python import sempy_labs as labs labs.assign_workspace_to_dataflow_storage( @@ -29,7 +65,20 @@ labs.assign_workspace_to_dataflow_storage( ) ``` +### Parameters +> **dataflow_storage_account** (str) +> +>> Required; The name of the dataflow storage account. 
+> +> **workspace** (str, default=None) +> +>> Optional; The name of the workspace. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [backup_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.backup_semantic_model) +#### [Backs up](https://learn.microsoft.com/azure/analysis-services/analysis-services-backup) a semantic model to the ADLS Gen2 storage account connected to the workspace. + ```python import sempy_labs as labs labs.backup_semantic_model( @@ -41,7 +90,35 @@ labs.backup_semantic_model( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **file_path** (str) +> +>> Required; The ADLS Gen2 storage account location in which to backup the semantic model. Always saves within the 'power-bi-backup/' folder. +Must end in '.abf'. +Example 1: file_path = 'MyModel.abf' +Example 2: file_path = 'MyFolder/MyModel.abf' +> +> **allow_overwrite** (bool, default=True) +> +>> Optional; If True, overwrites backup files of the same name. If False, the file you are saving cannot have the same name as a file that already exists in the same location. +> +> **apply_compression** (bool, default=True) +> +>> Optional; If True, compresses the backup file. Compressed backup files save disk space, but require slightly higher CPU utilization. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. 
+> ### [cancel_dataset_refresh](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.cancel_dataset_refresh) +#### Cancels the refresh of a semantic model which was executed via the [Enhanced Refresh API](https://learn.microsoft.com/power-bi/connect-data/asynchronous-refresh) + ```python import sempy_labs as labs labs.cancel_dataset_refresh( @@ -51,7 +128,25 @@ labs.cancel_dataset_refresh( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **request_id** (str, default=None) +> +>> Optional; The request id of a semantic model refresh. +Defaults to finding the latest active refresh of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [clear_cache](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.clear_cache) +#### Clears the cache of a semantic model. +See [here](https://learn.microsoft.com/analysis-services/instances/clear-the-analysis-services-caches?view=asallproducts-allversions) for documentation. ```python import sempy_labs as labs labs.clear_cache( @@ -60,7 +155,20 @@ labs.clear_cache( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [commit_to_git](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.commit_to_git) +#### Commits all or a selection of items within a workspace to Git. 
+ ```python import sempy_labs as labs labs.commit_to_git( @@ -70,7 +178,25 @@ labs.commit_to_git( ) ``` +### Parameters +> **comment** (str) +> +>> Required; The Git commit comment. +> +> **item_ids** (str | List[str], default=None) +> +>> Optional; A list of item Ids to commit to Git. +Defaults to None which commits all items to Git. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [connect_workspace_to_git](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.connect_workspace_to_git) +#### Connects a workspace to a git repository. + ```python import sempy_labs as labs labs.connect_workspace_to_git( @@ -84,7 +210,44 @@ labs.connect_workspace_to_git( ) ``` +### Parameters +> **organization_name** (str) +> +>> Required; The organization name. +> +> **project_name** (str) +> +>> Required; The project name. +> +> **repository_name** (str) +> +>> Required; The repository name. +> +> **branch_name** (str) +> +>> Required; The branch name. +> +> **directory_name** (str) +> +>> Required; The directory name. +> +> **git_provider_type** (str, default="AzureDevOps") +> +>> Optional; A [Git provider type](https://learn.microsoft.com/rest/api/fabric/core/git/connect?tabs=HTTP#gitprovidertype). +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [copy_semantic_model_backup_file](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.copy_semantic_model_backup_file) +#### Copies a semantic model backup file (.abf) from an Azure storage account to another location within the Azure storage account. + +Requirements: + 1. 
Must have an Azure storage account and connect it to both the source and target workspace. + 2. Must have an Azure Key Vault. + 3. Must save the Account Key from the Azure storage account as a secret within Azure Key Vault. ```python import sempy_labs as labs labs.copy_semantic_model_backup_file( @@ -100,7 +263,46 @@ labs.copy_semantic_model_backup_file( ) ``` +### Parameters +> **source_workspace** (str) +> +>> Required; The workspace name of the source semantic model backup file. +> +> **target_workspace** (str) +> +>> Required; The workspace name of the target semantic model backup file destination. +> +> **source_file_name** (str) +> +>> Required; The name of the source backup file (i.e. MyModel.abf). +> +> **target_file_name** (str) +> +>> Required; The name of the target backup file (i.e. MyModel.abf). +> +> **storage_account_url** (str) +> +>> Required; The URL of the storage account. To find this, navigate to the storage account within the Azure Portal. Within 'Endpoints', see the value for the 'Primary Endpoint'. +> +> **key_vault_uri** (str) +> +>> Required; The URI of the Azure Key Vault account. +> +> **key_vault_account_key** (str) +> +>> Required; The key vault secret name which contains the account key of the Azure storage account. +> +> **source_file_system** (str, default="power-bi-backup") +> +>> Optional; The container in which the source backup file is located. +> +> **target_file_system** (str, default="power-bi-backup") +> +>> Optional; The container in which the target backup file will be saved. +> ### [create_abfss_path](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_abfss_path) +#### Creates an abfss path for a delta table in a Fabric lakehouse. + ```python import sempy_labs as labs labs.create_abfss_path( @@ -110,7 +312,24 @@ labs.create_abfss_path( ) ``` +### Parameters +> **lakehouse_id** (UUID) +> +>> Required; ID of the Fabric lakehouse. 
+> +> **lakehouse_workspace_id** (UUID) +> +>> Required; ID of the Fabric workspace. +> +> **delta_table_name** (str) +> +>> Required; Name of the delta table name. +> +### Returns +> str; An abfss path which can be used to save/reference a delta table in a Fabric lakehouse. ### [create_blank_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_blank_semantic_model) +#### Creates a new blank semantic model (no tables/columns etc.). + ```python import sempy_labs as labs labs.create_blank_semantic_model( @@ -120,7 +339,24 @@ labs.create_blank_semantic_model( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **compatibility_level** (int, default=1605) +> +>> Optional; The compatibility level of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [create_custom_pool](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_custom_pool) +#### Creates a [custom pool](https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools) within a workspace. + ```python import sempy_labs as labs labs.create_custom_pool( @@ -137,7 +373,55 @@ labs.create_custom_pool( ) ``` +### Parameters +> **pool_name** (str) +> +>> Required; The custom pool name. +> +> **node_size** (str) +> +>> Required; The [node size](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#nodesize). +> +> **min_node_count** (int) +> +>> Required; The [minimum node count](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties). 
+> +> **max_node_count** (int) +> +>> Required; The [maximum node count](https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties). +> +> **min_executors** (int) +> +>> Required; The [minimum executors](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties). +> +> **max_executors** (int) +> +>> Required; The [maximum executors](https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties). +> +> **node_family** (str, default='MemoryOptimized') +> +>> Optional; The [node family](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#nodefamily). +> +> **auto_scale_enabled** (bool, default=True) +> +>> Optional; The status of [auto scale](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties). +> +> **dynamic_executor_allocation_enabled** (bool, default=True) +> +>> Optional; The status of the [dynamic executor allocation](https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties). +> +> **workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [create_model_bpa_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_model_bpa_semantic_model) +#### Dynamically generates a Direct Lake semantic model based on the 'modelbparesults' delta table which contains the Best Practice Analyzer results. 
+This semantic model used in combination with the corresponding Best Practice Analyzer report can be used to analyze multiple semantic models +on multiple workspaces at once (and over time). + +The semantic model is always created within the same workspace as the lakehouse. ```python import sempy_labs as labs labs.create_model_bpa_semantic_model( @@ -147,7 +431,25 @@ labs.create_model_bpa_semantic_model( ) ``` +### Parameters +> **dataset** (str, default='ModelBPA') +> +>> Optional; Name of the semantic model to be created. +> +> **lakehouse** (str, default=None) +> +>> Optional; Name of the Fabric lakehouse which contains the 'modelbparesults' delta table. +Defaults to None which resolves to the default lakehouse attached to the notebook. +> +> **lakehouse_workspace** (str, default=None) +> +>> Optional; The workspace in which the lakehouse resides. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [create_relationship_name](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_relationship_name) +#### Formats a relationship's table/columns into a fully qualified name. + ```python import sempy_labs as labs labs.create_relationship_name( @@ -158,7 +460,28 @@ labs.create_relationship_name( ) ``` +### Parameters +> **from_table** (str) +> +>> Required; The name of the table on the 'from' side of the relationship. +> +> **from_column** (str) +> +>> Required; The name of the column on the 'from' side of the relationship. +> +> **to_table** (str) +> +>> Required; The name of the table on the 'to' side of the relationship. +> +> **to_column** (str) +> +>> Required; The name of the column on the 'to' side of the relationship. +> +### Returns +> str; The fully qualified relationship name. 
### [create_semantic_model_from_bim](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_semantic_model_from_bim) +#### Creates a new semantic model based on a Model.bim file. + ```python import sempy_labs as labs labs.create_semantic_model_from_bim( @@ -168,7 +491,24 @@ labs.create_semantic_model_from_bim( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **bim_file** (dict) +> +>> Required; The model.bim file. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [create_warehouse](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_warehouse) +#### Creates a Fabric warehouse. + ```python import sempy_labs as labs labs.create_warehouse( @@ -178,7 +518,24 @@ labs.create_warehouse( ) ``` +### Parameters +> **warehouse** (str) +> +>> Required; Name of the warehouse. +> +> **description** (str, default=None) +> +>> Optional; A description of the warehouse. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [delete_custom_pool](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.delete_custom_pool) +#### Deletes a [custom pool](https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools) within a workspace. + ```python import sempy_labs as labs labs.delete_custom_pool( @@ -187,7 +544,20 @@ labs.delete_custom_pool( ) ``` +### Parameters +> **pool_name** (str) +> +>> Required; The custom pool name. +> +> **workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace. 
+Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [delete_user_from_workspace](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.delete_user_from_workspace) +#### Removes a user from a workspace. + ```python import sempy_labs as labs labs.delete_user_from_workspace( @@ -196,7 +566,20 @@ labs.delete_user_from_workspace( ) ``` +### Parameters +> **email_address** (str) +> +>> Required; The email address of the user. +> +> **workspace** (str, default=None) +> +>> Optional; The name of the workspace. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [deploy_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.deploy_semantic_model) +#### Deploys a semantic model based on an existing semantic model. + ```python import sempy_labs as labs labs.deploy_semantic_model( @@ -208,7 +591,34 @@ labs.deploy_semantic_model( ) ``` +### Parameters +> **source_dataset** (str) +> +>> Required; Name of the semantic model to deploy. +> +> **source_workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **target_dataset** (str) +> +>> Optional; Name of the new semantic model to be created. +> +> **target_workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the new semantic model will be deployed. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. 
+> +> **refresh_target_dataset** (bool, default=True) +> +>> Optional; If set to True, this will initiate a full refresh of the target semantic model in the target workspace. +> ### [deprovision_workspace_identity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.deprovision_workspace_identity) +#### Deprovisions a workspace identity for a workspace. + ```python import sempy_labs as labs labs.deprovision_workspace_identity( @@ -216,7 +626,16 @@ workspace=None, ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [disable_qso](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.disable_qso) +#### Sets the max read-only replicas to 0, disabling query scale out. + ```python import sempy_labs as labs labs.disable_qso( @@ -225,7 +644,22 @@ dataset='', workspace=None, ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the current query scale out settings. ### [disconnect_workspace_from_git](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.disconnect_workspace_from_git) +#### Disconnects a workspace from a git repository. + ```python import sempy_labs as labs labs.disconnect_workspace_from_git( @@ -233,7 +667,18 @@ workspace=None, ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. 
+Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [evaluate_dax_impersonation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.evaluate_dax_impersonation) +#### Runs a DAX query against a semantic model using the [REST API](https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/execute-queries-in-group). + +Compared to evaluate_dax this allows passing the user name for impersonation. +Note that the REST API has significant limitations compared to the XMLA endpoint. ```python import sempy_labs as labs labs.evaluate_dax_impersonation( @@ -244,7 +689,30 @@ labs.evaluate_dax_impersonation( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **dax_query** (str) +> +>> Required; The DAX query. +> +> **user_name** (str) +> +>> Optional; The user name (i.e. hello@goodbye.com). +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe holding the result of the DAX query. ### [export_model_to_onelake](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.export_model_to_onelake) +#### Exports a semantic model's tables to delta tables in the lakehouse. Creates shortcuts to the tables if a lakehouse is specified. + ```python import sempy_labs as labs labs.export_model_to_onelake( @@ -255,7 +723,28 @@ labs.export_model_to_onelake( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. 
+> +> **destination_lakehouse** (str, default=None) +> +>> Optional; The name of the Fabric lakehouse where shortcuts will be created to access the delta tables created by the export. If the lakehouse specified does not exist, one will be created with that name. If no lakehouse is specified, shortcuts will not be created. +> +> **destination_workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace in which the lakehouse resides. +> ### [format_dax_object_name](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.format_dax_object_name) +#### Formats a table/column combination to the 'Table Name'[Column Name] format. + ```python import sempy_labs as labs labs.format_dax_object_name( @@ -264,7 +753,20 @@ labs.format_dax_object_name( ) ``` +### Parameters +> **table** (str) +> +>> Required; The name of the table. +> +> **column** (str) +> +>> Required; The name of the column. +> +### Returns +> str; The fully qualified object name. ### [generate_embedded_filter](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.generate_embedded_filter) +#### Converts the filter expression to a filter expression which can be used by a Power BI embedded URL. + ```python import sempy_labs as labs labs.generate_embedded_filter( @@ -272,7 +774,16 @@ labs.generate_embedded_filter( ) ``` +### Parameters +> **filter** (str) +> +>> Required; The filter expression for an embedded Power BI report. +> +### Returns +> str; A filter expression usable by a Power BI embedded URL. ### [get_capacity_id](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_capacity_id) +#### Obtains the Capacity Id for a given workspace. + ```python import sempy_labs as labs labs.get_capacity_id( @@ -280,7 +791,18 @@ labs.get_capacity_id( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. 
+Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> UUID; The capacity Id. ### [get_capacity_name](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_capacity_name) +#### Obtains the capacity name for a given workspace. + ```python import sempy_labs as labs labs.get_capacity_name( @@ -288,7 +810,18 @@ labs.get_capacity_name( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> str; The capacity name. ### [get_direct_lake_sql_endpoint](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_direct_lake_sql_endpoint) +#### Obtains the SQL Endpoint ID of the semantic model. + ```python import sempy_labs as labs labs.get_direct_lake_sql_endpoint( @@ -297,7 +830,22 @@ labs.get_direct_lake_sql_endpoint( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; The name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> uuid.UUID; The ID of SQL Endpoint. ### [get_git_connection](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_git_connection) +#### Obtains the Git status of items in the workspace, that can be committed to Git. + ```python import sempy_labs as labs labs.get_git_connection( @@ -305,7 +853,18 @@ labs.get_git_connection( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. 
+Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the Git status of items in the workspace. ### [get_git_status](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_git_status) +#### Obtains the Git status of items in the workspace, that can be committed to Git. + ```python import sempy_labs as labs labs.get_git_status( @@ -313,7 +872,18 @@ labs.get_git_status( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the Git status of items in the workspace. ### [get_measure_dependencies](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_measure_dependencies) +#### Shows all dependencies for all measures in a semantic model. + ```python import sempy_labs as labs labs.get_measure_dependencies( @@ -322,7 +892,22 @@ labs.get_measure_dependencies( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; Shows all dependencies for all measures in the semantic model. ### [get_model_calc_dependencies](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_model_calc_dependencies) +#### Shows all dependencies for all objects in a semantic model. 
+ ```python import sempy_labs as labs labs.get_model_calc_dependencies( @@ -331,7 +916,22 @@ labs.get_model_calc_dependencies( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; Shows all dependencies for all objects in the semantic model. ### [get_notebook_definition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_notebook_definition) +#### Obtains the notebook definition. + ```python import sempy_labs as labs labs.get_notebook_definition( @@ -341,7 +941,27 @@ labs.get_notebook_definition( ) ``` +### Parameters +> **notebook_name** (str) +> +>> Required; The name of the notebook. +> +> **workspace** (str, default=None) +> +>> Optional; The name of the workspace. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **decode** (bool, default=True) +> +>> Optional; If True, decodes the notebook definition file into .ipynb format. +If False, obtains the notebook definition file in base64 format. +> +### Returns +> ipynb; The notebook definition. ### [get_object_level_security](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_object_level_security) +#### Shows the object level security for the semantic model. + ```python import sempy_labs as labs labs.get_object_level_security( @@ -350,7 +970,22 @@ labs.get_object_level_security( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. 
+Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the object level security for the semantic model. ### [get_semantic_model_bim](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_semantic_model_bim) +#### Extracts the Model.bim file for a given semantic model. + ```python import sempy_labs as labs labs.get_semantic_model_bim( @@ -361,7 +996,32 @@ labs.get_semantic_model_bim( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the semantic model resides. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **save_to_file_name** (str, default=None) +> +>> Optional; If specified, saves the Model.bim as a file in the lakehouse attached to the notebook. +> +> **lakehouse_workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the lakehouse attached to the workspace resides. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> dict; The Model.bim file for the semantic model. ### [get_spark_settings](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_spark_settings) +#### Shows the spark settings for a workspace. + ```python import sempy_labs as labs labs.get_spark_settings( @@ -369,7 +1029,18 @@ labs.get_spark_settings( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. 
+> +### Returns +> pandas.DataFrame; A pandas dataframe showing the spark settings for a workspace. ### [import_notebook_from_web](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_notebook_from_web) +#### Creates a new notebook within a workspace based on a Jupyter notebook hosted in the web. + ```python import sempy_labs as labs labs.import_notebook_from_web( @@ -380,7 +1051,29 @@ labs.import_notebook_from_web( ) ``` +### Parameters +> **notebook_name** (str) +> +>> Required; The name of the notebook to be created. +> +> **url** (str) +> +>> Required; The url of the Jupyter Notebook (.ipynb) +> +> **description** (str, default=None) +> +>> Optional; The description of the notebook. +Defaults to None which does not place a description. +> +> **workspace** (str, default=None) +> +>> Optional; The name of the workspace. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [import_vertipaq_analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_vertipaq_analyzer) +#### Imports and visualizes the vertipaq analyzer info from a saved .zip file in your lakehouse. + ```python import sempy_labs as labs labs.import_vertipaq_analyzer( @@ -389,7 +1082,20 @@ labs.import_vertipaq_analyzer( ) ``` +### Parameters +> **folder_path** (str) +> +>> Required; The folder within your lakehouse in which the .zip file containing the vertipaq analyzer info has been saved. +> +> **file_name** (str) +> +>> Required; The file name of the file which contains the vertipaq analyzer info. +> +### Returns +> str; A visualization of the Vertipaq Analyzer statistics. ### [initialize_git_connection](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.initialize_git_connection) +#### Initializes a connection for a workspace that is connected to Git. 
+ ```python import sempy_labs as labs labs.initialize_git_connection( @@ -397,7 +1103,16 @@ labs.initialize_git_connection( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [is_default_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.is_default_semantic_model) +#### Identifies whether a semantic model is a default semantic model. + ```python import sempy_labs as labs labs.is_default_semantic_model( @@ -406,13 +1121,32 @@ labs.is_default_semantic_model( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; The name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> bool; A True/False value indicating whether the semantic model is a default semantic model. ### [list_capacities](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_capacities) +#### Shows the capacities and their properties. + ```python import sempy_labs as labs labs.list_capacities() ``` +### Returns +> pandas.DataFrame; A pandas dataframe showing the capacities and their properties ### [list_custom_pools](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_custom_pools) +#### Lists all [custom pools](https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools) within a workspace. + ```python import sempy_labs as labs labs.list_custom_pools( @@ -420,7 +1154,18 @@ labs.list_custom_pools( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace. 
+Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing all the custom pools within the Fabric workspace. ### [list_dashboards](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_dashboards) +#### Shows a list of the dashboards within a workspace. + ```python import sempy_labs as labs labs.list_dashboards( @@ -428,13 +1173,28 @@ labs.list_dashboards( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the dashboards within a workspace. ### [list_dataflow_storage_accounts](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_dataflow_storage_accounts) +#### Shows the accessible dataflow storage accounts. + ```python import sempy_labs as labs labs.list_dataflow_storage_accounts() ``` +### Returns +> pandas.DataFrame; A pandas dataframe showing the accessible dataflow storage accounts. ### [list_dataflows](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_dataflows) +#### Shows a list of all dataflows which exist within a workspace. + ```python import sempy_labs as labs labs.list_dataflows( @@ -442,7 +1202,18 @@ labs.list_dataflows( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the dataflows which exist within a workspace. 
### [list_deployment_pipeline_stage_items](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_deployment_pipeline_stage_items) +#### Shows the supported items from the workspace assigned to the specified stage of the specified deployment pipeline. + ```python import sempy_labs as labs labs.list_deployment_pipeline_stage_items( @@ -451,7 +1222,20 @@ labs.list_deployment_pipeline_stage_items( ) ``` +### Parameters +> **deployment_pipeline** (str) +> +>> Required; The deployment pipeline name. +> +> **stage_name** (str) +> +>> Required; The deployment pipeline stage name. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the supported items from the workspace assigned to the specified stage of the specified deployment pipeline. ### [list_deployment_pipeline_stages](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_deployment_pipeline_stages) +#### Shows the specified deployment pipeline stages. + ```python import sempy_labs as labs labs.list_deployment_pipeline_stages( @@ -459,13 +1243,26 @@ labs.list_deployment_pipeline_stages( ) ``` +### Parameters +> **deployment_pipeline** (str) +> +>> Required; The deployment pipeline name. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the specified deployment pipeline stages. ### [list_deployment_pipelines](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_deployment_pipelines) +#### Shows a list of deployment pipelines the user can access. + ```python import sempy_labs as labs labs.list_deployment_pipelines() ``` +### Returns +> pandas.DataFrame; A pandas dataframe showing a list of deployment pipelines the user can access. ### [list_lakehouses](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_lakehouses) +#### Shows the lakehouses within a workspace. 
+ ```python import sempy_labs as labs labs.list_lakehouses( @@ -473,7 +1270,18 @@ labs.list_lakehouses( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the lakehouses within a workspace. ### [list_qso_settings](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_qso_settings) +#### Shows the query scale out settings for a semantic model (or all semantic models within a workspace). + ```python import sempy_labs as labs labs.list_qso_settings( @@ -482,7 +1290,22 @@ labs.list_qso_settings( ) ``` +### Parameters +> **dataset** (str, default=None) +> +>> Optional; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the query scale out settings. ### [list_reports_using_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_reports_using_semantic_model) +#### Shows a list of all the reports (in all workspaces) which use a given semantic model. + ```python import sempy_labs as labs labs.list_reports_using_semantic_model( @@ -491,7 +1314,22 @@ labs.list_reports_using_semantic_model( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. 
+> +### Returns +> pandas.DataFrame; A pandas dataframe showing the reports which use a given semantic model. ### [list_semantic_model_objects](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_semantic_model_objects) +#### Shows a list of semantic model objects. + ```python import sempy_labs as labs labs.list_semantic_model_objects( @@ -500,7 +1338,22 @@ labs.list_semantic_model_objects( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing a list of objects in the semantic model ### [list_shortcuts](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_shortcuts) +#### Shows all shortcuts which exist in a Fabric lakehouse and their properties. + ```python import sempy_labs as labs labs.list_shortcuts( @@ -509,7 +1362,23 @@ labs.list_shortcuts( ) ``` +### Parameters +> **lakehouse** (str, default=None) +> +>> Optional; The Fabric lakehouse name. +Defaults to None which resolves to the lakehouse attached to the notebook. +> +> **workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace in which lakehouse resides. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing all the shortcuts which exist in the specified lakehouse. ### [list_warehouses](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_warehouses) +#### Shows the warehouses within a workspace. 
+ ```python import sempy_labs as labs labs.list_warehouses( @@ -517,7 +1386,18 @@ labs.list_warehouses( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the warehouses within a workspace. ### [list_workspace_role_assignments](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_workspace_role_assignments) +#### Shows the members of a given workspace. + ```python import sempy_labs as labs labs.list_workspace_role_assignments( @@ -525,7 +1405,18 @@ labs.list_workspace_role_assignments( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the members of a given workspace and their roles. ### [list_workspace_users](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_workspace_users) +#### A list of all the users of a workspace and their roles. + ```python import sempy_labs as labs labs.list_workspace_users( @@ -533,7 +1424,18 @@ labs.list_workspace_users( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The name of the workspace. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the users of a workspace and their properties. 
### [measure_dependency_tree](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.measure_dependency_tree) +#### Prints a measure dependency tree of all dependent objects for a measure in a semantic model. + ```python import sempy_labs as labs labs.measure_dependency_tree( @@ -543,7 +1445,24 @@ labs.measure_dependency_tree( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **measure_name** (str) +> +>> Required; Name of the measure. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [model_bpa_rules](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.model_bpa_rules) +#### Shows the default rules for the semantic model BPA used by the run_model_bpa function. + ```python import sempy_labs as labs labs.model_bpa_rules( @@ -553,7 +1472,26 @@ labs.model_bpa_rules( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **dependencies** (pd.DataFrame, default=None) +> +>> Optional; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. +> +### Returns +> pandas.DataFrame; A pandas dataframe containing the default rules for the run_model_bpa function. ### [provision_workspace_identity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.provision_workspace_identity) +#### Provisions a workspace identity for a workspace. 
+ ```python import sempy_labs as labs labs.provision_workspace_identity( @@ -561,7 +1499,16 @@ labs.provision_workspace_identity( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [qso_sync](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.qso_sync) +#### Triggers a query scale-out sync of read-only replicas for the specified dataset from the specified workspace. + ```python import sempy_labs as labs labs.qso_sync( @@ -570,7 +1517,20 @@ labs.qso_sync( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [qso_sync_status](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.qso_sync_status) +#### Returns the query scale-out sync status for the specified dataset from the specified workspace. + ```python import sempy_labs as labs labs.qso_sync_status( @@ -579,7 +1539,22 @@ labs.qso_sync_status( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> Tuple[pandas.DataFrame, pandas.DataFrame]; 2 pandas dataframes showing the query scale-out sync status. ### [refresh_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.refresh_semantic_model) +#### Refreshes a semantic model. 
+ ```python import sempy_labs as labs labs.refresh_semantic_model( @@ -594,7 +1569,46 @@ labs.refresh_semantic_model( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **tables** (str, List[str], default=None) +> +>> Optional; A string or a list of tables to refresh. +> +> **partitions** (str, List[str], default=None) +> +>> Optional; A string or a list of partitions to refresh. Partitions must be formatted as such: 'Table Name'[Partition Name]. +> +> **refresh_type** (str, default='full') +> +>> Optional; The type of processing to perform. Types align with the TMSL refresh command types: full, clearValues, calculate, dataOnly, automatic, and defragment. The add type isn't supported. Defaults to "full". +> +> **retry_count** (int, default=0) +> +>> Optional; Number of times the operation retries before failing. +> +> **apply_refresh_policy** (bool, default=True) +> +>> Optional; If an incremental refresh policy is defined, determines whether to apply the policy. Modes are true or false. If the policy isn't applied, the full process leaves partition definitions unchanged, and fully refreshes all partitions in the table. If commitMode is transactional, applyRefreshPolicy can be true or false. If commitMode is partialBatch, applyRefreshPolicy of true isn't supported, and applyRefreshPolicy must be set to false. +> +> **max_parallelism** (int, default=10) +> +>> Optional; Determines the maximum number of threads that can run the processing commands in parallel. +This value aligns with the MaxParallelism property that can be set in the TMSL Sequence command or by using other methods. +Defaults to 10. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. 
+> ### [resolve_capacity_name](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_capacity_name) +#### Obtains the capacity name for a given capacity Id. + ```python import sempy_labs as labs labs.resolve_capacity_name( @@ -602,7 +1616,18 @@ labs.resolve_capacity_name( ) ``` +### Parameters +> **capacity_id** (UUID, default=None) +> +>> Optional; The capacity Id. +Defaults to None which resolves to the capacity name of the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the capacity name of the workspace of the notebook. +> +### Returns +> str; The capacity name. ### [resolve_dataset_id](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_dataset_id) +#### Obtains the ID of the semantic model. + ```python import sempy_labs as labs labs.resolve_dataset_id( @@ -611,7 +1636,22 @@ labs.resolve_dataset_id( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; The name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> UUID; The ID of the semantic model. ### [resolve_dataset_name](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_dataset_name) +#### Obtains the name of the semantic model. + ```python import sempy_labs as labs labs.resolve_dataset_name( @@ -620,7 +1660,22 @@ labs.resolve_dataset_name( ) ``` +### Parameters +> **dataset_id** (UUID) +> +>> Required; The ID of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> str; The name of the semantic model. 
### [resolve_item_type](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_item_type) +#### Obtains the item type for a given Fabric Item Id within a Fabric workspace. + ```python import sempy_labs as labs labs.resolve_item_type( @@ -629,7 +1684,22 @@ labs.resolve_item_type( ) ``` +### Parameters +> **item_id** (UUID) +> +>> Required; The item/artifact Id. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> str; The item type for the item Id. ### [resolve_lakehouse_id](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_lakehouse_id) +#### Obtains the ID of the Fabric lakehouse. + ```python import sempy_labs as labs labs.resolve_lakehouse_id( @@ -638,7 +1708,22 @@ labs.resolve_lakehouse_id( ) ``` +### Parameters +> **lakehouse** (str) +> +>> Required; The name of the Fabric lakehouse. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> UUID; The ID of the Fabric lakehouse. ### [resolve_lakehouse_name](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_lakehouse_name) +#### Obtains the name of the Fabric lakehouse. + ```python import sempy_labs as labs labs.resolve_lakehouse_name( @@ -647,7 +1732,23 @@ labs.resolve_lakehouse_name( ) ``` +### Parameters +> **lakehouse_id** (UUID, default=None) +> +>> Optional; The name of the Fabric lakehouse. +Defaults to None which resolves to the lakehouse attached to the notebook. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. 
+Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> str; The name of the Fabric lakehouse. ### [resolve_report_id](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_report_id) +#### Obtains the ID of the Power BI report. + ```python import sempy_labs as labs labs.resolve_report_id( @@ -656,7 +1757,22 @@ labs.resolve_report_id( ) ``` +### Parameters +> **report** (str) +> +>> Required; The name of the Power BI report. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> UUID; The ID of the Power BI report. ### [resolve_report_name](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_report_name) +#### Obtains the name of the Power BI report. + ```python import sempy_labs as labs labs.resolve_report_name( @@ -665,7 +1781,22 @@ labs.resolve_report_name( ) ``` +### Parameters +> **report_id** (UUID) +> +>> Required; The name of the Power BI report. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> str; The name of the Power BI report. ### [resolve_workspace_capacity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_workspace_capacity) +#### Obtains the capacity Id and capacity name for a given workspace. + ```python import sempy_labs as labs labs.resolve_workspace_capacity( @@ -673,7 +1804,18 @@ labs.resolve_workspace_capacity( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. 
+Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> Tuple[UUID, str]; capacity Id; capacity name. ### [restore_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.restore_semantic_model) +#### [Restores](https://learn.microsoft.com/power-bi/enterprise/service-premium-backup-restore-dataset) a semantic model based on a backup (.abf) file +within the ADLS Gen2 storage account connected to the workspace. ```python import sempy_labs as labs labs.restore_semantic_model( @@ -686,7 +1828,38 @@ labs.restore_semantic_model( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **file_path** (str) +> +>> Required; The location of the backup file from which to restore the semantic model. Must end in '.abf'. +Example 1: file_path = 'MyModel.abf' +Example 2: file_path = 'MyFolder/MyModel.abf' +> +> **allow_overwrite** (bool, default=True) +> +>> Optional; If True, overwrites backup files of the same name. If False, the file you are saving cannot have the same name as a file that already exists in the same location. +> +> **ignore_incompatibilities** (bool, default=True) +> +>> Optional; If True, ignores incompatibilities between Azure Analysis Services and Power BI Premium. +> +> **force_restore** (bool, default=False) +> +>> Optional; If True, restores the semantic model with the existing semantic model unloaded and offline. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [run_model_bpa](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa) +#### Displays an HTML visualization of the results of the Best Practice Analyzer scan for a semantic model.
+ ```python import sempy_labs as labs labs.run_model_bpa( @@ -700,7 +1873,44 @@ labs.run_model_bpa( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **rules** (pandas.DataFrame, default=None) +> +>> Optional; A pandas dataframe containing rules to be evaluated. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **export** (bool, default=False) +> +>> Optional; If True, exports the resulting dataframe to a delta table in the lakehouse attached to the notebook. +> +> **return_dataframe** (bool, default=False) +> +>> Optional; If True, returns a pandas dataframe instead of the visualization. +> +> **extended** (bool, default=False) +> +>> Optional; If True, runs the set_vertipaq_annotations function to collect Vertipaq Analyzer statistics to be used in the analysis of the semantic model. +> +> **language** (str, default=None) +> +>> Optional; Specifying a language code (i.e. 'it-IT' for Italian) will auto-translate the Category, Rule Name and Description into the specified language. +Defaults to None which resolves to English. +> +### Returns +> pandas.DataFrame; A pandas dataframe in HTML format showing semantic model objects which violated the best practice analyzer rules. ### [run_model_bpa_bulk](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa_bulk) +#### Runs the semantic model Best Practice Analyzer across all semantic models in a workspace (or all accessible workspaces). +Saves (appends) the results to the 'modelbparesults' delta table in the lakehouse attached to the notebook. +Default semantic models are skipped in this analysis. 
```python import sempy_labs as labs labs.run_model_bpa_bulk( @@ -712,7 +1922,36 @@ labs.run_model_bpa_bulk( ) ``` +### Parameters +> **dataset** (str) +> +>> Optional; Name of the semantic model. +> +> **rules** (pandas.DataFrame, default=None) +> +>> Optional; A pandas dataframe containing rules to be evaluated. Based on the format of the dataframe produced by the model_bpa_rules function. +> +> **extended** (bool, default=False) +> +>> Optional; If True, runs the set_vertipaq_annotations function to collect Vertipaq Analyzer statistics to be used in the analysis of the semantic model. +> +> **language** (str, default=None) +> +>> Optional; The language (code) in which the rules will appear. For example, specifying 'it-IT' will show the Rule Name, Category and Description in Italian. +Defaults to None which resolves to English. +> +> **workspace** (str | List[str], default=None) +> +>> Optional; The workspace or list of workspaces to scan. +Defaults to None which scans all accessible workspaces. +> +> **skip_models** (str | List[str], default=['ModelBPA', 'Fabric Capacity Metrics']) +> +>> Optional; The semantic models to always skip when running this analysis. +> ### [save_as_delta_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.save_as_delta_table) +#### Saves a pandas dataframe as a delta table in a Fabric lakehouse. + ```python import sempy_labs as labs labs.save_as_delta_table( @@ -725,7 +1964,39 @@ labs.save_as_delta_table( ) ``` +### Parameters +> **dataframe** (pandas.DataFrame) +> +>> Required; The dataframe to be saved as a delta table. +> +> **delta_table_name** (str) +> +>> Required; The name of the delta table. +> +> **write_mode** (str) +> +>> Required; The write mode for the save operation. Options: 'append', 'overwrite'. +> +> **merge_schema** (bool, default=False) +> +>> Optional; Merges the schemas of the dataframe to the delta table. 
+> +> **lakehouse** (str, default=None) +> +>> Optional; The Fabric lakehouse in which the delta table will be saved. +Defaults to None which resolves to the lakehouse attached to the notebook. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [set_qso](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.set_qso) +#### Sets the query scale out settings for a semantic model. + ```python import sempy_labs as labs labs.set_qso( @@ -736,7 +2007,30 @@ labs.set_qso( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **auto_sync** (bool, default=True) +> +>> Optional; Whether the semantic model automatically syncs read-only replicas. +> +> **max_read_only_replicas** (int, default=-1) +> +>> Optional; To enable semantic model scale-out, set max_read_only_replicas to -1, or any non-0 value. A value of -1 allows Power BI to create as many read-only replicas as your Power BI capacity supports. You can also explicitly set the replica count to a value lower than that of the capacity maximum. Setting max_read_only_replicas to -1 is recommended. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the current query scale-out settings. ### [set_semantic_model_storage_format](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.set_semantic_model_storage_format) +#### Sets the semantic model storage format.
+ ```python import sempy_labs as labs labs.set_semantic_model_storage_format( @@ -746,7 +2040,24 @@ labs.set_semantic_model_storage_format( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **storage_format** (str) +> +>> Required; The storage format for the semantic model. Valid options: 'Large', 'Small'. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [set_workspace_default_storage_format](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.set_workspace_default_storage_format) +#### Sets the default storage format for semantic models within a workspace. + ```python import sempy_labs as labs labs.set_workspace_default_storage_format( @@ -755,7 +2066,20 @@ labs.set_workspace_default_storage_format( ) ``` +### Parameters +> **storage_format** (str) +> +>> Required; The storage format for the semantic model. Valid options: 'Large', 'Small'. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [translate_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.translate_semantic_model) +#### Translates names, descriptions, display folders for all objects in a semantic model. + ```python import sempy_labs as labs labs.translate_semantic_model( @@ -766,7 +2090,30 @@ labs.translate_semantic_model( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **languages** (str, List[str]) +> +>> Required; The language code(s) in which to translate the semantic model. 
+> +> **exclude_characters** (str) +> +>> Optional; A string specifying characters which will be replaced by a space in the translation text when sent to the translation service. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; Shows a pandas dataframe which displays all of the translations in the semantic model. ### [unassign_workspace_from_capacity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.unassign_workspace_from_capacity) +#### Unassigns a workspace from its assigned capacity. + ```python import sempy_labs as labs labs.unassign_workspace_from_capacity( @@ -774,7 +2121,16 @@ labs.unassign_workspace_from_capacity( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [update_custom_pool](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.update_custom_pool) +#### Updates the properties of a [custom pool](https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools) within a workspace. + ```python import sempy_labs as labs labs.update_custom_pool( @@ -791,7 +2147,60 @@ labs.update_custom_pool( ) ``` +### Parameters +> **pool_name** (str) +> +>> Required; The custom pool name. +> +> **node_size** (str, default=None) +> +>> Optional; The [node size](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#nodesize). +Defaults to None which keeps the existing property setting. 
+> +> **min_node_count** (int, default=None) +> +>> Optional; The [minimum node count](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties). +Defaults to None which keeps the existing property setting. +> +> **max_node_count** (int, default=None) +> +>> Optional; The [maximum node count](https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties). +Defaults to None which keeps the existing property setting. +> +> **min_executors** (int, default=None) +> +>> Optional; The [minimum executors](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties). +Defaults to None which keeps the existing property setting. +> +> **max_executors** (int, default=None) +> +>> Optional; The [maximum executors](https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties). +Defaults to None which keeps the existing property setting. +> +> **node_family** (str, default=None) +> +>> Optional; The [node family](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#nodefamily). +Defaults to None which keeps the existing property setting. +> +> **auto_scale_enabled** (bool, default=None) +> +>> Optional; The status of [auto scale](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties). +Defaults to None which keeps the existing property setting. +> +> **dynamic_executor_allocation_enabled** (bool, default=None) +> +>> Optional; The status of the [dynamic executor allocation](https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties). +Defaults to None which keeps the existing property setting. 
+> +> **workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [update_from_git](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.update_from_git) +#### Updates the workspace with commits pushed to the connected branch. + ```python import sempy_labs as labs labs.update_from_git( @@ -803,7 +2212,33 @@ labs.update_from_git( ) ``` +### Parameters +> **workspace_head** (str) +> +>> Optional; Full SHA hash that the workspace is synced to. This value may be null only after Initialize Connection. +In other cases, the system will validate that the given value is aligned with the head known to the system. +> +> **remote_commit_hash** (str) +> +>> Optional; Remote full SHA commit hash. +> +> **conflict_resolution_policy** (str) +> +>> Optional; The [conflict resolution policy](https://learn.microsoft.com/rest/api/fabric/core/git/update-from-git?tabs=HTTP#conflictresolutionpolicy). +> +> **allow_override** (bool, default=False) +> +>> Optional; If True, allows overriding incoming items during the update-from-Git process. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [update_item](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.update_item) +#### Updates the name/description of a Fabric item. + ```python import sempy_labs as labs labs.update_item( @@ -815,7 +2250,32 @@ labs.update_item( ) ``` +### Parameters +> **item_type** (str) +> +>> Required; Type of item to update. +> +> **current_name** (str) +> +>> Required; The current name of the item. +> +> **new_name** (str) +> +>> Required; The new name of the item. +> +> **description** (str, default=None) +> +>> Optional; A description of the item.
+> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [update_spark_settings](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.update_spark_settings) +#### Updates the spark settings for a workspace. + ```python import sempy_labs as labs labs.update_spark_settings( @@ -831,7 +2291,56 @@ labs.update_spark_settings( ) ``` +### Parameters +> **automatic_log_enabled** (bool, default=None) +> +>> Optional; The status of the [automatic log](https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#automaticlogproperties). +Defaults to None which keeps the existing property setting. +> +> **high_concurrency_enabled** (bool, default=None) +> +>> Optional; The status of the [high concurrency](https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#highconcurrencyproperties) for notebook interactive run. +Defaults to None which keeps the existing property setting. +> +> **customize_compute_enabled** (bool, default=None) +> +>> Optional; [Customize compute](https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#poolproperties) configurations for items. +Defaults to None which keeps the existing property setting. +> +> **default_pool_name** (str, default=None) +> +>> Optional; [Default pool](https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#poolproperties) for workspace. +Defaults to None which keeps the existing property setting. +> +> **max_node_count** (int, default=None) +> +>> Optional; The [maximum node count](https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#starterpoolproperties). 
+Defaults to None which keeps the existing property setting. +> +> **max_executors** (int, default=None) +> +>> Optional; The [maximum executors](https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#starterpoolproperties). +Defaults to None which keeps the existing property setting. +> +> **environment_name** (str, default=None) +> +>> Optional; The name of the [default environment](https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#environmentproperties). Empty string indicated there is no workspace default environment +Defaults to None which keeps the existing property setting. +> +> **runtime_version** (str, default=None) +> +>> Optional; The [runtime version](https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#environmentproperties). +Defaults to None which keeps the existing property setting. +> +> **workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [update_workspace_user](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.update_workspace_user) +#### Updates a user's role within a workspace. + ```python import sempy_labs as labs labs.update_workspace_user( @@ -842,7 +2351,28 @@ labs.update_workspace_user( ) ``` +### Parameters +> **email_address** (str) +> +>> Required; The email address of the user. +> +> **role_name** (str) +> +>> Required; The [role](https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#groupuseraccessright) of the user within the workspace. +> +> **principal_type** (str, default='User') +> +>> Optional; The [principal type](https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#principaltype). 
+> +> **workspace** (str, default=None) +> +>> Optional; The name of the workspace. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [vertipaq_analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.vertipaq_analyzer) +#### Displays an HTML visualization of the Vertipaq Analyzer statistics from a semantic model. + ```python import sempy_labs as labs labs.vertipaq_analyzer( @@ -853,7 +2383,30 @@ labs.vertipaq_analyzer( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the semantic model exists. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **export** (str, default=None) +> +>> Optional; Specifying 'zip' will export the results to a zip file in your lakehouse (which can be imported using the import_vertipaq_analyzer function. +Specifying 'table' will export the results to delta tables (appended) in your lakehouse. +Default value: None. +> +> **read_stats_from_data** (bool, default=False) +> +>> Optional; Setting this parameter to true has the function get Column Cardinality and Missing Rows using DAX (Direct Lake semantic models achieve this using a Spark query to the lakehouse). +> ### [add_table_to_direct_lake_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.add_table_to_direct_lake_semantic_model) +#### Adds a table and all of its columns to a Direct Lake semantic model, based on a Fabric lakehouse table. 
+ ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -866,7 +2419,32 @@ directlake.add_table_to_direct_lake_semantic_model( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **table_name** (str, List[str]) +> +>> Required; Name of the table in the semantic model. +> +> **lakehouse_table_name** (str) +> +>> Required; The name of the Fabric lakehouse table. +> +> **refresh** (bool, default=True) +> +>> Optional; Refreshes the table after it is added to the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace in which the semantic model resides. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [check_fallback_reason](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.check_fallback_reason) +#### Shows the reason a table in a Direct Lake semantic model would fallback to DirectQuery. + ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -876,7 +2454,22 @@ directlake.check_fallback_reason( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; The tables in the semantic model and their fallback reason. ### [direct_lake_schema_compare](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.direct_lake_schema_compare) +#### Checks that all the tables in a Direct Lake semantic model map to tables in their corresponding lakehouse and that the columns in each table exist. 
+ ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -886,7 +2479,20 @@ directlake.direct_lake_schema_compare( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [direct_lake_schema_sync](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.direct_lake_schema_sync) +#### Shows/adds columns which exist in the lakehouse but do not exist in the semantic model (only for tables in the semantic model). + ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -897,7 +2503,24 @@ directlake.direct_lake_schema_sync( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **add_to_model** (bool, default=False) +> +>> Optional; If set to True, columns which exist in the lakehouse but do not exist in the semantic model are added to the semantic model. No new tables are added. +> ### [generate_direct_lake_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.generate_direct_lake_semantic_model) +#### Dynamically generates a Direct Lake semantic model based on tables in a Fabric lakehouse. + ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -912,14 +2535,54 @@ directlake.generate_direct_lake_semantic_model( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model to be created. 
+> +> **lakehouse_tables** (str | List[str]) +> +>> Required; The table(s) within the Fabric lakehouse to add to the semantic model. All columns from these tables will be added to the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the semantic model will reside. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **lakehouse** (str, default=None) +> +>> Optional; The lakehouse which stores the delta tables which will feed the Direct Lake semantic model. +Defaults to None which resolves to the attached lakehouse. +> +> **lakehouse_workspace** (str, default=None) +> +>> Optional; The Fabric workspace in which the lakehouse resides. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **overwrite** (bool, default=False) +> +>> Optional; If set to True, overwrites the existing semantic model if it already exists. +> +> **refresh** (bool, default=True) +> +>> Optional; If True, refreshes the newly created semantic model after it is created. +> ### [get_direct_lake_guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_direct_lake_guardrails) +#### Shows the guardrails for when Direct Lake semantic models will fallback to Direct Query +based on Microsoft's [online documentation](https://learn.microsoft.com/power-bi/enterprise/directlake-overview). ```python import sempy_labs as labs import sempy_labs.directlake as directlake directlake.get_direct_lake_guardrails() ``` +### Returns +> pandas.DataFrame; A table showing the Direct Lake guardrails by SKU. 
### [get_direct_lake_lakehouse](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_direct_lake_lakehouse) +#### Identifies the lakehouse used by a Direct Lake semantic model. + ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -931,7 +2594,33 @@ directlake.get_direct_lake_lakehouse( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **lakehouse** (str, default=None) +> +>> Optional; The Fabric lakehouse used by the Direct Lake semantic model. +Defaults to None which resolves to the lakehouse attached to the notebook. +> +> **lakehouse_workspace** (str, default=None) +> +>> Optional; The Fabric workspace used by the lakehouse. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> str, uuid.UUID; The lakehouse name and lakehouse ID. ### [get_direct_lake_source](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_direct_lake_source) +#### Obtains the source information for a direct lake semantic model. + ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -941,7 +2630,24 @@ directlake.get_direct_lake_source( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; The name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. 
+> +### Returns +> Tuple[str, str, UUID, UUID]; If the source of the direct lake semantic model is a lakehouse this will return: 'Lakehouse', Lakehouse Name, SQL Endpoint Id, Workspace Id +If the source of the direct lake semantic model is a warehouse this will return: 'Warehouse', Warehouse Name, Warehouse Id, Workspace Id +If the semantic model is not a Direct Lake semantic model, it will return None, None, None, None. ### [get_directlake_guardrails_for_sku](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_directlake_guardrails_for_sku) +#### Shows the guardrails for Direct Lake based on the SKU used by your workspace's capacity. +* Use the result of the 'get_sku_size' function as an input for this function's sku_size parameter.* ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -950,7 +2656,16 @@ directlake.get_directlake_guardrails_for_sku( ) ``` +### Parameters +> **sku_size** (str) +> +>> Required; Sku size of a workspace/capacity +> +### Returns +> pandas.DataFrame; A table showing the Direct Lake guardrails for the given SKU. ### [get_shared_expression](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_shared_expression) +#### Dynamically generates the M expression used by a Direct Lake model for a given lakehouse. + ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -960,7 +2675,23 @@ directlake.get_shared_expression( ) ``` +### Parameters +> **lakehouse** (str, default=None) +> +>> Optional; The Fabric lakehouse used by the Direct Lake semantic model. +Defaults to None which resolves to the lakehouse attached to the notebook. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace used by the lakehouse. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. &#13;
+> +### Returns +> str; Shows the expression which can be used to connect a Direct Lake semantic model to its SQL Endpoint. ### [get_sku_size](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_sku_size) +#### Shows the SKU size for a workspace. + ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -969,7 +2700,18 @@ directlake.get_sku_size( ) ``` +### Parameters +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> str; The SKU size for a workspace. ### [list_direct_lake_model_calc_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.list_direct_lake_model_calc_tables) +#### Shows the calculated tables and their respective DAX expression for a Direct Lake model (which has been migrated from import/DirectQuery). + ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -979,7 +2721,22 @@ directlake.list_direct_lake_model_calc_tables( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing the calculated tables which were migrated to Direct Lake and whose DAX expressions are stored as model annotations. 
### [show_unsupported_direct_lake_objects](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.show_unsupported_direct_lake_objects) +#### Returns a list of a semantic model's objects which are not supported by Direct Lake based on +[official documentation](https://learn.microsoft.com/power-bi/enterprise/directlake-overview#known-issues-and-limitations). ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -989,7 +2746,22 @@ directlake.show_unsupported_direct_lake_objects( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame, pandas.DataFrame, pandas.DataFrame; 3 pandas dataframes showing objects in a semantic model which are not supported by Direct Lake. ### [update_direct_lake_model_lakehouse_connection](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.update_direct_lake_model_lakehouse_connection) +#### Remaps a Direct Lake semantic model's SQL Endpoint connection to a new lakehouse. + ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -1001,7 +2773,31 @@ directlake.update_direct_lake_model_lakehouse_connection( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the semantic model exists. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **lakehouse** (str, default=None) +> +>> Optional; The Fabric lakehouse used by the Direct Lake semantic model. 
+Defaults to None which resolves to the lakehouse attached to the notebook. +> +> **lakehouse_workspace** (str, default=None) +> +>> Optional; The Fabric workspace used by the lakehouse. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [update_direct_lake_partition_entity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.update_direct_lake_partition_entity) +#### Remaps a table (or tables) in a Direct Lake semantic model to a table in a lakehouse. + ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -1013,7 +2809,28 @@ directlake.update_direct_lake_partition_entity( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **table_name** (str, List[str]) +> +>> Required; Name of the table(s) in the semantic model. +> +> **entity_name** (str, List[str]) +> +>> Required; Name of the lakehouse table to be mapped to the semantic model table. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the semantic model exists. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [warm_direct_lake_cache_isresident](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.warm_direct_lake_cache_isresident) +#### Performs a refresh on the semantic model and puts the columns which were in memory prior to the refresh back into memory. + ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -1023,7 +2840,22 @@ directlake.warm_direct_lake_cache_isresident( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. 
+Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; Returns a pandas dataframe showing the columns that have been put into memory. ### [warm_direct_lake_cache_perspective](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.warm_direct_lake_cache_perspective) +#### Warms the cache of a Direct Lake semantic model by running a simple DAX query against the columns in a perspective. + ```python import sempy_labs as labs import sempy_labs.directlake as directlake @@ -1035,7 +2867,30 @@ directlake.warm_direct_lake_cache_perspective( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **perspective** (str) +> +>> Required; Name of the perspective which contains objects to be used for warming the cache. +> +> **add_dependencies** (bool, default=False) +> +>> Optional; Includes object dependencies in the cache warming process. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; Returns a pandas dataframe showing the columns that have been put into memory. ### [create_shortcut_onelake](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.create_shortcut_onelake) +#### Creates a [shortcut](https://learn.microsoft.com/fabric/onelake/onelake-shortcuts) to a delta table in OneLake. + ```python import sempy_labs as labs import sempy_labs.lakehouse as lake @@ -1049,7 +2904,36 @@ lake.create_shortcut_onelake( ) ``` +### Parameters +> **table_name** (str) +> +>> Required; The table name for which a shortcut will be created. 
+> +> **source_lakehouse** (str) +> +>> Required; The Fabric lakehouse in which the table resides. +> +> **source_workspace** (str) +> +>> Required; The name of the Fabric workspace in which the source lakehouse exists. +> +> **destination_lakehouse** (str) +> +>> Required; The Fabric lakehouse in which the shortcut will be created. +> +> **destination_workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace in which the shortcut will be created. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **shortcut_name** (str, default=None) +> +>> Optional; The name of the shortcut 'table' to be created. This defaults to the 'table_name' parameter value. +> ### [delete_shortcut](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.delete_shortcut) +#### Deletes a shortcut. + ```python import sempy_labs as labs import sempy_labs.lakehouse as lake @@ -1060,7 +2944,25 @@ lake.delete_shortcut( ) ``` +### Parameters +> **shortcut_name** (str) +> +>> Required; The name of the shortcut. +> +> **lakehouse** (str, default=None) +> +>> Optional; The Fabric lakehouse name in which the shortcut resides. +Defaults to None which resolves to the lakehouse attached to the notebook. +> +> **workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace in which lakehouse resides. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [get_lakehouse_columns](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_columns) +#### Shows the tables and columns of a lakehouse and their respective properties. 
+ ```python import sempy_labs as labs import sempy_labs.lakehouse as lake @@ -1070,7 +2972,23 @@ lake.get_lakehouse_columns( ) ``` +### Parameters +> **lakehouse** (str, default=None) +> +>> Optional; The Fabric lakehouse. +Defaults to None which resolves to the lakehouse attached to the notebook. +> +> **lakehouse_workspace** (str, default=None) +> +>> Optional; The Fabric workspace used by the lakehouse. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; Shows the tables/columns within a lakehouse and their properties. ### [get_lakehouse_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables) +#### Shows the tables of a lakehouse and their respective properties. Option to include additional properties relevant to Direct Lake guardrails. + ```python import sempy_labs as labs import sempy_labs.lakehouse as lake @@ -1083,14 +3001,46 @@ lake.get_lakehouse_tables( ) ``` +### Parameters +> **lakehouse** (str, default=None) +> +>> Optional; The Fabric lakehouse. +Defaults to None which resolves to the lakehouse attached to the notebook. +> +> **lakehouse_workspace** (str, default=None) +> +>> Optional; The Fabric workspace used by the lakehouse. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **extended** (bool, default=False) +> +>> Optional; Obtains additional columns relevant to the size of each table. +> +> **count_rows** (bool, default=False) +> +>> Optional; Obtains a row count for each lakehouse table. +> +> **export** (bool, default=False) +> +>> Optional; Exports the resulting dataframe to a delta table in the lakehouse. +> +### Returns +> pandas.DataFrame; Shows the tables/columns within a lakehouse and their properties. 
### [lakehouse_attached](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.lakehouse_attached) +#### Identifies if a lakehouse is attached to the notebook. + ```python import sempy_labs as labs import sempy_labs.lakehouse as lake lake.lakehouse_attached() ``` +### Returns +> bool; Returns True if a lakehouse is attached to the notebook. ### [optimize_lakehouse_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.optimize_lakehouse_tables) +#### Runs the [OPTIMIZE](https://docs.delta.io/latest/optimizations-oss.html) function over the specified lakehouse tables. + ```python import sempy_labs as labs import sempy_labs.lakehouse as lake @@ -1101,7 +3051,26 @@ lake.optimize_lakehouse_tables( ) ``` +### Parameters +> **tables** (str | List[str], default=None) +> +>> Optional; The table(s) to optimize. +Defaults to None which resolves to optimizing all tables within the lakehouse. +> +> **lakehouse** (str, default=None) +> +>> Optional; The Fabric lakehouse. +Defaults to None which resolves to the lakehouse attached to the notebook. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace used by the lakehouse. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [vacuum_lakehouse_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.vacuum_lakehouse_tables) +#### Runs the [VACUUM](https://docs.delta.io/latest/delta-utility.html#remove-files-no-longer-referenced-by-a-delta-table) function over the specified lakehouse tables. + ```python import sempy_labs as labs import sempy_labs.lakehouse as lake @@ -1113,7 +3082,35 @@ lake.vacuum_lakehouse_tables( ) ``` +### Parameters +> **tables** (str | List[str] | None) +> +>> Optional; The table(s) to vacuum. &#13;
If no tables are specified, all tables in the lakehouse will be vacuumed. +> +> **lakehouse** (str, default=None) +> +>> Optional; The Fabric lakehouse. +Defaults to None which resolves to the lakehouse attached to the notebook. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace used by the lakehouse. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **retain_n_hours** (int, default=None) +> +>> Optional; The number of hours to retain historical versions of Delta table files. +Files older than this retention period will be deleted during the vacuum operation. +If not specified, the default retention period configured for the Delta table will be used. +The default retention period is 168 hours (7 days) unless manually configured via table properties. +> ### [create_pqt_file](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.create_pqt_file) +#### Dynamically generates a [Power Query Template](https://learn.microsoft.com/power-query/power-query-template) file based on the semantic model. The .pqt file is +saved within the Files section of your lakehouse. + +Dataflows Gen2 has a limit of 50 tables. If there are more than 50 tables, this will save multiple Power Query Template +files (with each file having a max of 50 tables). ```python import sempy_labs as labs import sempy_labs.migration as migration @@ -1124,7 +3121,24 @@ migration.create_pqt_file( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. &#13;
+> +> **file_name** (str, default='PowerQueryTemplate') +> +>> Optional; The name of the Power Query Template file to be generated. +> ### [migrate_calc_tables_to_lakehouse](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.migrate_calc_tables_to_lakehouse) +#### Creates delta tables in your lakehouse based on the DAX expression of a calculated table in an import/DirectQuery semantic model. +The DAX expression encapsulating the calculated table logic is stored in the new Direct Lake semantic model as model annotations. ```python import sempy_labs as labs import sempy_labs.migration as migration @@ -1138,7 +3152,41 @@ migration.migrate_calc_tables_to_lakehouse( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the import/DirectQuery semantic model. +> +> **new_dataset** (str) +> +>> Required; Name of the Direct Lake semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the import/DirectQuery semantic model exists. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **new_dataset_workspace** (str) +> +>> Optional; The Fabric workspace name in which the Direct Lake semantic model will be created. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **lakehouse** (str, default=None) +> +>> Optional; The Fabric lakehouse used by the Direct Lake semantic model. +Defaults to None which resolves to the lakehouse attached to the notebook. +> +> **lakehouse_workspace** (str, default=None) +> +>> Optional; The Fabric workspace used by the lakehouse. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. 
+> ### [migrate_calc_tables_to_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.migrate_calc_tables_to_semantic_model) +#### Creates new tables in the Direct Lake semantic model based on the lakehouse tables created using the 'migrate_calc_tables_to_lakehouse' function. + ```python import sempy_labs as labs import sempy_labs.migration as migration @@ -1152,7 +3200,41 @@ migration.migrate_calc_tables_to_semantic_model( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the import/DirectQuery semantic model. +> +> **new_dataset** (str) +> +>> Required; Name of the Direct Lake semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the import/DirectQuery semantic model exists. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **new_dataset_workspace** (str) +> +>> Optional; The Fabric workspace name in which the Direct Lake semantic model will be created. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **lakehouse** (str, default=None) +> +>> Optional; The Fabric lakehouse used by the Direct Lake semantic model. +Defaults to None which resolves to the lakehouse attached to the notebook. +> +> **lakehouse_workspace** (str, default=None) +> +>> Optional; The Fabric workspace used by the lakehouse. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [migrate_field_parameters](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.migrate_field_parameters) +#### Migrates field parameters from one semantic model to another. 
+ ```python import sempy_labs as labs import sempy_labs.migration as migration @@ -1164,7 +3246,30 @@ migration.migrate_field_parameters( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the import/DirectQuery semantic model. +> +> **new_dataset** (str) +> +>> Required; Name of the Direct Lake semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the import/DirectQuery semantic model exists. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **new_dataset_workspace** (str) +> +>> Optional; The Fabric workspace name in which the Direct Lake semantic model will be created. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [migrate_model_objects_to_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.migrate_model_objects_to_semantic_model) +#### Adds the rest of the model objects (besides tables/columns) and their properties to a Direct Lake semantic model based on an import/DirectQuery semantic model. + ```python import sempy_labs as labs import sempy_labs.migration as migration @@ -1176,7 +3281,30 @@ migration.migrate_model_objects_to_semantic_model( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the import/DirectQuery semantic model. +> +> **new_dataset** (str) +> +>> Required; Name of the Direct Lake semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the import/DirectQuery semantic model exists. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. 
+> +> **new_dataset_workspace** (str) +> +>> Optional; The Fabric workspace name in which the Direct Lake semantic model will be created. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [migrate_tables_columns_to_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.migrate_tables_columns_to_semantic_model) +#### Adds tables/columns to the new Direct Lake semantic model based on an import/DirectQuery semantic model. + ```python import sempy_labs as labs import sempy_labs.migration as migration @@ -1190,7 +3318,41 @@ migration.migrate_tables_columns_to_semantic_model( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the import/DirectQuery semantic model. +> +> **new_dataset** (str) +> +>> Required; Name of the Direct Lake semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the import/DirectQuery semantic model exists. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **new_dataset_workspace** (str) +> +>> Optional; The Fabric workspace name in which the Direct Lake semantic model will be created. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **lakehouse** (str, default=None) +> +>> Optional; The Fabric lakehouse used by the Direct Lake semantic model. +Defaults to None which resolves to the lakehouse attached to the notebook. +> +> **lakehouse_workspace** (str, default=None) +> +>> Optional; The Fabric workspace used by the lakehouse. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. 
+> ### [migration_validation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.migration_validation) +#### Shows the objects in the original semantic model and whether they were migrated successfully or not. + ```python import sempy_labs as labs import sempy_labs.migration as migration @@ -1202,7 +3364,32 @@ migration.migration_validation( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the import/DirectQuery semantic model. +> +> **new_dataset** (str) +> +>> Required; Name of the Direct Lake semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the import/DirectQuery semantic model exists. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **new_dataset_workspace** (str) +> +>> Optional; The Fabric workspace name in which the Direct Lake semantic model will be created. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; A pandas dataframe showing a list of objects and whether they were successfully migrated. Also shows the % of objects which were migrated successfully. ### [refresh_calc_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.refresh_calc_tables) +#### Recreates the delta tables in the lakehouse based on the DAX expressions stored as model annotations in the Direct Lake semantic model. + ```python import sempy_labs as labs import sempy_labs.migration as migration @@ -1212,7 +3399,20 @@ migration.refresh_calc_tables( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. &#13;
+Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [clone_report](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.clone_report) +#### Clones a Power BI report. + ```python import sempy_labs as labs import sempy_labs.report as rep @@ -1226,7 +3426,40 @@ rep.clone_report( ) ``` +### Parameters +> **report** (str) +> +>> Required; Name of the Power BI report. +> +> **cloned_report** (str) +> +>> Required; Name of the new Power BI report. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **target_workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace to place the cloned report. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **target_dataset** (str, default=None) +> +>> Optional; The name of the semantic model to be used by the cloned report. +Defaults to None which resolves to the semantic model used by the initial report. +> +> **target_dataset_workspace** (str, default=None) +> +>> Optional; The workspace in which the semantic model to be used by the report resides. +Defaults to None which resolves to the semantic model used by the initial report. +> ### [create_model_bpa_report](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.create_model_bpa_report) +#### Dynamically generates a Best Practice Analyzer report for analyzing semantic models. + ```python import sempy_labs as labs import sempy_labs.report as rep @@ -1237,7 +3470,26 @@ rep.create_model_bpa_report( ) ``` +### Parameters +> **report** (str, default='ModelBPA') +> +>> Optional; Name of the report. 
+Defaults to 'ModelBPA'. +> +> **dataset** (str, default='ModelBPA') +> +>> Optional; Name of the semantic model which feeds this report. +Defaults to 'ModelBPA' +> +> **dataset_workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the semantic model resides. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [create_report_from_reportjson](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.create_report_from_reportjson) +#### Creates a report based on a report.json file (and an optional themes.json file). + ```python import sempy_labs as labs import sempy_labs.report as rep @@ -1250,7 +3502,32 @@ rep.create_report_from_reportjson( ) ``` +### Parameters +> **report** (str) +> +>> Required; Name of the report. +> +> **dataset** (str) +> +>> Required; Name of the semantic model to connect to the report. +> +> **report_json** (dict) +> +>> Required; The report.json file to be used to create the report. +> +> **theme_json** (dict, default=None) +> +>> Optional; The theme.json file to be used for the theme of the report. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [export_report](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.export_report) +#### Exports a Power BI report to a file in your lakehouse. + ```python import sempy_labs as labs import sempy_labs.report as rep @@ -1266,7 +3543,45 @@ rep.export_report( ) ``` +### Parameters +> **report** (str) +> +>> Required; Name of the Power BI report. +> +> **export_format** (str) +> +>> Required; The format in which to export the report. For image formats, enter the file extension in this parameter, not 'IMAGE'. 
+[Valid formats](https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group#fileformat) +> +> **file_name** (str, default=None) +> +>> Optional; The name of the file to be saved within the lakehouse. Do not include the file extension. Defaults to the reportName parameter value. +> +> **bookmark_name** (str, default=None) +> +>> Optional; The name (GUID) of a bookmark within the report. +> +> **page_name** (str, default=None) +> +>> Optional; The name (GUID) of the report page. +> +> **visual_name** (str, default=None) +> +>> Optional; The name (GUID) of a visual. If you specify this parameter you must also specify the page_name parameter. +> +> **report_filter** (str, default=None) +> +>> Optional; A report filter to be applied when exporting the report. Syntax is user-friendly. See above for examples. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [get_report_definition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.get_report_definition) +#### Gets the collection of definition files of a report. + ```python import sempy_labs as labs import sempy_labs.report as rep @@ -1276,7 +3591,22 @@ rep.get_report_definition( ) ``` +### Parameters +> **report** (str) +> +>> Required; Name of the report. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the report resides. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> pandas.DataFrame; The collection of report definition files within a pandas dataframe. &#13;
### [get_report_json](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.get_report_json) +#### Gets the report.json file content of a Power BI report. + ```python import sempy_labs as labs import sempy_labs.report as rep @@ -1287,7 +3617,26 @@ rep.get_report_json( ) ``` +### Parameters +> **report** (str) +> +>> Required; Name of the Power BI report. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the report exists. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **save_to_file_name** (str, default=None) +> +>> Optional; Specifying this parameter will save the report.json file to the lakehouse attached to the notebook with the file name of this parameter. +> +### Returns +> dict; The report.json file for a given Power BI report. ### [launch_report](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.launch_report) +#### Shows a Power BI report within a Fabric notebook. + ```python import sempy_labs as labs import sempy_labs.report as rep @@ -1297,7 +3646,22 @@ rep.launch_report( ) ``` +### Parameters +> **report** (str) +> +>> Required; Name of the Power BI report. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +### Returns +> str; An embedded Power BI report within the notebook. ### [report_rebind](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.report_rebind) +#### Rebinds a report to a semantic model. 
+ ```python import sempy_labs as labs import sempy_labs.report as rep @@ -1309,7 +3673,30 @@ rep.report_rebind( ) ``` +### Parameters +> **report** (str | List[str]) +> +>> Required; Name(s) of the Power BI report(s). +> +> **dataset** (str) +> +>> Required; Name of the semantic model. +> +> **report_workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace in which the report resides. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **dataset_workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace in which the semantic model resides. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [report_rebind_all](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.report_rebind_all) +#### Rebinds all reports across all workspaces which are bound to a specific semantic model to a new semantic model. + ```python import sempy_labs as labs import sempy_labs.report as rep @@ -1322,7 +3709,36 @@ rep.report_rebind_all( ) ``` +### Parameters +> **dataset** (str) +> +>> Required; Name of the semantic model currently bound to the reports. +> +> **new_dataset** (str) +> +>> Required; Name of the semantic model to rebind to the reports. +> +> **dataset_workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace in which the original semantic model resides. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **new_dataset_workspace** (str, default=None) +> +>> Optional; The name of the Fabric workspace in which the new semantic model resides. 
+Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> +> **report_workspace** (str | List[str], default=None) +> +>> Optional; The name(s) of the Fabric workspace(s) in which the report(s) reside(s). +Defaults to None which finds all reports in all workspaces which use the semantic model and rebinds them to +the new semantic model. +> ### [update_report_from_reportjson](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.update_report_from_reportjson) +#### Updates a report based on a report.json file. + ```python import sempy_labs as labs import sempy_labs.report as rep @@ -1333,7 +3749,24 @@ rep.update_report_from_reportjson( ) ``` +### Parameters +> **report** (str) +> +>> Required; Name of the report. +> +> **report_json** (dict) +> +>> Required; The report.json file to be used to update the report. +> +> **workspace** (str, default=None) +> +>> Optional; The Fabric workspace name in which the report resides. +Defaults to None which resolves to the workspace of the attached lakehouse +or if no lakehouse attached, resolves to the workspace of the notebook. +> ### [add_calculated_column](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_calculated_column) +#### Adds a calculated column to a table within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1353,7 +3786,55 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table in which the column will be created. +> +> **column_name** (str) +> +>> Required; Name of the column. +> +> **expression** (str) +> +>> Required; The DAX expression for the column. +> +> **data_type** (str) +> +>> Required; The data type of the column. 
+> +> **format_string** (str, default=None) +> +>> Optional; Format string of the column. +> +> **hidden** (bool, default=False) +> +>> Optional; Whether the column will be hidden or visible. +> +> **description** (str, default=None) +> +>> Optional; A description of the column. +> +> **display_folder** (str, default=None) +> +>> Optional; The display folder in which the column will reside. +> +> **data_category** (str, default=None) +> +>> Optional; The data category of the column. +> +> **key** (bool, default=False) +> +>> Optional; Marks the column as the primary key of the table. +> +> **summarize_by** (str, default=None) +> +>> Optional; Sets the value for the Summarize By property of the column. +Defaults to None which resolves to 'Default'. +> ### [add_calculated_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_calculated_table) +#### Adds a calculated table to the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1367,7 +3848,30 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **name** (str) +> +>> Required; Name of the table. +> +> **expression** (str) +> +>> Required; The DAX expression for the calculated table. +> +> **description** (str, default=None) +> +>> Optional; A description of the table. +> +> **data_catgegory** (str, default=None) +> +>> Optional; The data category for the table. +> +> **hidden** (bool, default=False) +> +>> Optional; Whether the table is hidden or visible. +> ### [add_calculated_table_column](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_calculated_table_column) +#### Adds a calculated table column to a calculated table within a semantic model. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1387,7 +3891,55 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table in which the column will be created. +> +> **column_name** (str) +> +>> Required; Name of the column. +> +> **source_column** (str) +> +>> Required; The source column for the column. +> +> **data_type** (str) +> +>> Required; The data type of the column. +> +> **format_string** (str, default=None) +> +>> Optional; Format string of the column. +> +> **hidden** (bool, default=False) +> +>> Optional; Whether the column will be hidden or visible. +> +> **description** (str, default=None) +> +>> Optional; A description of the column. +> +> **display_folder** (str, default=None) +> +>> Optional; The display folder in which the column will reside. +> +> **data_category** (str, default=None) +> +>> Optional; The data category of the column. +> +> **key** (bool, default=False) +> +>> Optional; Marks the column as the primary key of the table. +> +> **summarize_by** (str, default=None) +> +>> Optional; Sets the value for the Summarize By property of the column. +Defaults to None resolves to 'Default'. +> ### [add_calculation_group](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_calculation_group) +#### Adds a [calculation group](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.calculationgroup?view=analysisservices-dotnet) to a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1400,7 +3952,26 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **name** (str) +> +>> Required; Name of the calculation group. +> +> **precedence** (int) +> +>> Required; The precedence of the calculation group. 
+> +> **description** (str, default=None) +> +>> Optional; A description of the calculation group. +> +> **hidden** (bool, default=False) +> +>> Optional; Whether the calculation group is hidden/visible. +> ### [add_calculation_item](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_calculation_item) +#### Adds a [calculation item](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.calculationitem?view=analysisservices-dotnet) to +a [calculation group](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.calculationgroup?view=analysisservices-dotnet) within a semantic model. ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1415,7 +3986,34 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table in which the calculation item will be created. +> +> **calculation_item_name** (str) +> +>> Required; Name of the calculation item. +> +> **expression** (str) +> +>> Required; The DAX expression for the calculation item. +> +> **ordinal** (int, default=None) +> +>> Optional; The ordinal of the calculation item. +> +> **format_string_expression** (str, default=None) +> +>> Optional; The format string expression for the calculation item. +> +> **description** (str, default=None) +> +>> Optional; A description of the calculation item. +> ### [add_data_column](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_data_column) +#### Adds a data column to a table within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1435,7 +4033,55 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table in which the column will be created. 
+> +> **column_name** (str) +> +>> Required; Name of the column. +> +> **source_column** (str) +> +>> Required; The source column for the column. +> +> **data_type** (str) +> +>> Required; The data type of the column. +> +> **format_string** (str, default=None) +> +>> Optional; Format string of the column. +> +> **hidden** (bool, default=False) +> +>> Optional; Whether the column will be hidden or visible. +> +> **description** (str, default=None) +> +>> Optional; A description of the column. +> +> **display_folder** (str, default=None) +> +>> Optional; The display folder in which the column will reside. +> +> **data_category** (str, default=None) +> +>> Optional; The data category of the column. +> +> **key** (bool, default=False) +> +>> Optional; Marks the column as the primary key of the table. +> +> **summarize_by** (str, default=None) +> +>> Optional; Sets the value for the Summarize By property of the column. +Defaults to None resolves to 'Default'. +> ### [add_entity_partition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_entity_partition) +#### Adds an entity partition to a table within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1448,7 +4094,27 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **entity_name** (str) +> +>> Required; Name of the lakehouse table. +> +> **expression** (TOM Object, default=None) +> +>> Optional; The expression used by the table. +Defaults to None which resolves to the 'DatabaseQuery' expression. +> +> **description** (str, default=None) +> +>> Optional; A description for the partition. 
+> ### [add_expression](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_expression) +#### Adds an [expression](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.namedexpression?view=analysisservices-dotnet) to a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1460,7 +4126,22 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **name** (str) +> +>> Required; Name of the expression. +> +> **expression** (str) +> +>> Required; The M expression of the expression. +> +> **description** (str, default=None) +> +>> Optional; A description of the expression. +> ### [add_field_parameter](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_field_parameter) +#### Adds a [field parameter](https://learn.microsoft.com/power-bi/create-reports/power-bi-field-parameters) to the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1472,7 +4153,25 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **objects** (List[str]) +> +>> Required; The columns/measures to be included in the field parameter. +Columns must be specified as such : 'Table Name'[Column Name]. +Measures may be formatted as '[Measure Name]' or 'Measure Name'. +> +> **object_names** (List[str], default=None) +> +>> Optional; The corresponding visible name for the measures/columns in the objects list. +Defaults to None which shows the measure/column name. 
+> ### [add_hierarchy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_hierarchy) +#### Adds a [hierarchy](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.hierarchy?view=analysisservices-dotnet) to a table within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1487,7 +4186,34 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **hierarchy_name** (str) +> +>> Required; Name of the hierarchy. +> +> **columns** (List[str]) +> +>> Required; Names of the columns to use within the hierarchy. +> +> **levels** (List[str], default=None) +> +>> Optional; Names of the levels to use within the hierarchy (instead of the column names). +> +> **hierarchy_description** (str, default=None) +> +>> Optional; A description of the hierarchy. +> +> **hierarchy_hidden** (bool, default=False) +> +>> Optional; Whether the hierarchy is visible or hidden. +> ### [add_incremental_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_incremental_refresh_policy) +#### Adds an [incremental refresh](https://learn.microsoft.com/power-bi/connect-data/incremental-refresh-overview) policy for a table within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1506,7 +4232,51 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **column_name** (str) +> +>> Required; The DateTime column to be used for the RangeStart and RangeEnd parameters. +> +> **start_date** (str) +> +>> Required; The date to be used for the RangeStart parameter. +> +> **end_date** (str) +> +>> Required; The date to be used for the RangeEnd parameter. 
+> +> **incremental_granularity** (str) +> +>> Required; Granularity of the (most recent) incremental refresh range. +> +> **incremental_periods** (int) +> +>> Required; Number of periods for the incremental refresh range. +> +> **rolling_window_granularity** (str) +> +>> Required; Target granularity of the rolling window for the whole semantic model. +> +> **rolling_window_periods** (int) +> +>> Required; Number of periods for the rolling window for the whole semantic model. +> +> **only_refresh_complete_days** (bool, default=False) +> +>> Optional; Lag or leading periods from Now() to the rolling window head. +> +> **detect_data_changes_column** (str, default=None) +> +>> Optional; The column to use for detecting data changes. +Defaults to None which resolves to not detecting data changes. +> ### [add_m_partition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_m_partition) +#### Adds an M-partition to a table within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1520,7 +4290,32 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **partition_name** (str) +> +>> Required; Name of the partition. +> +> **expression** (str) +> +>> Required; The M expression encapsulating the logic for the partition. +> +> **mode** (str, default=None) +> +>> Optional; The query mode for the partition. +Defaults to None which resolves to 'Import'. +[Valid mode values](https://learn.microsoft.com/en-us/dotnet/api/microsoft.analysisservices.tabular.modetype?view=analysisservices-dotnet) +> +> **description** (str, default=None) +> +>> Optional; A description for the partition. +> ### [add_measure](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_measure) +#### Adds a measure to the semantic model. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1537,7 +4332,42 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table in which the measure will be created. +> +> **measure_name** (str) +> +>> Required; Name of the measure. +> +> **expression** (str) +> +>> Required; DAX expression of the measure. +> +> **format_string** (str, default=None) +> +>> Optional; Format string of the measure. +> +> **hidden** (bool, default=False) +> +>> Optional; Whether the measure will be hidden or visible. +> +> **description** (str, default=None) +> +>> Optional; A description of the measure. +> +> **display_folder** (str, default=None) +> +>> Optional; The display folder in which the measure will reside. +> +> **format_string_expression** (str, default=None) +> +>> Optional; The format string expression. +> ### [add_perspective](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_perspective) +#### Adds a [perspective](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.perspective?view=analysisservices-dotnet) to a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1547,7 +4377,14 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **perspective_name** (str) +> +>> Required; Name of the perspective. +> ### [add_relationship](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_relationship) +#### Adds a [relationship](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.singlecolumnrelationship?view=analysisservices-dotnet) to a semantic model. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1566,7 +4403,52 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **from_table** (str) +> +>> Required; Name of the table on the 'from' side of the relationship. +> +> **from_column** (str) +> +>> Required; Name of the column on the 'from' side of the relationship. +> +> **to_table** (str) +> +>> Required; Name of the table on the 'to' side of the relationship. +> +> **to_column** (str) +> +>> Required; Name of the column on the 'to' side of the relationship. +> +> **from_cardinality** (str) +> +>> Required; The cardinality of the 'from' side of the relationship. Options: ['Many', 'One', 'None']. +> +> **to_cardinality** (str) +> +>> Required; The cardinality of the 'to' side of the relationship. Options: ['Many', 'One', 'None']. +> +> **cross_filtering_behavior** (str, default=None) +> +>> Optional; Setting for the cross filtering behavior of the relationship. Options: ('Automatic', 'OneDirection', 'BothDirections'). +Defaults to None which resolves to 'Automatic'. +> +> **is_active** (bool, default=True) +> +>> Optional; Setting for whether the relationship is active or not. +> +> **security_filtering_behavior** (str, default=None) +> +>> Optional; Setting for the security filtering behavior of the relationship. Options: ('None', 'OneDirection', 'BothDirections'). +Defaults to None which resolves to 'OneDirection'. +> +> **rely_on_referential_integrity** (bool, default=False) +> +>> Optional; Setting for the rely on referential integrity of the relationship. +> ### [add_role](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_role) +#### Adds a role to a semantic model. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1578,7 +4460,23 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **role_name** (str) +> +>> Required; Name of the role. +> +> **model_permission** (str, default=None) +> +>> Optional; The model permission for the role. +Defaults to None which resolves to 'Read'. +> +> **description** (str, default=None) +> +>> Optional; A description of the role. +> ### [add_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_table) +#### Adds a table to the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1591,7 +4489,26 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **name** (str) +> +>> Required; Name of the table. +> +> **description** (str, default=None) +> +>> Optional; A description of the table. +> +> **data_catgegory** (str, default=None) +> +>> Optional; The data category for the table. +> +> **hidden** (bool, default=False) +> +>> Optional; Whether the table is hidden or visible. +> ### [add_time_intelligence](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_time_intelligence) +#### Adds time intelligence measures + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1603,7 +4520,22 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **measure_name** (str) +> +>> Required; Name of the measure +> +> **date_table** (str) +> +>> Required; Name of the date table. +> +> **time_intel** (str, List[str]) +> +>> Required; Time intelligence measures to create (i.e. MTD, YTD, QTD). 
+> ### [add_to_perspective](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_to_perspective) +#### Adds an object to a [perspective](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.perspective?view=analysisservices-dotnet). + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1614,7 +4546,18 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> +> **perspective_name** (str) +> +>> Required; Name of the perspective. +> ### [add_translation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_translation) +#### Adds a [translation language](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.culture?view=analysisservices-dotnet) (culture) to a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1624,7 +4567,14 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **language** (str) +> +>> Required; The language code (i.e. 'it-IT' for Italian). +> ### [all_calculated_columns](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_calculated_columns) +#### Outputs a list of all calculated columns within all tables in the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1632,7 +4582,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.all_calculated_columns() ``` +### Returns +> Iterator[Microsoft.AnalysisServices.Tabular.Column]; All calculated columns within the semantic model. 
### [all_calculated_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_calculated_tables) +#### Outputs a list of all calculated tables in the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1640,7 +4594,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.all_calculated_tables() ``` +### Returns +> Iterator[Microsoft.AnalysisServices.Tabular.Table]; All calculated tables within the semantic model. ### [all_calculation_groups](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_calculation_groups) +#### Outputs a list of all calculation groups in the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1648,7 +4606,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.all_calculation_groups() ``` +### Returns +> Iterator[Microsoft.AnalysisServices.Tabular.Table]; All calculation groups within the semantic model. ### [all_calculation_items](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_calculation_items) +#### Outputs a list of all calculation items in the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1656,7 +4618,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.all_calculation_items() ``` +### Returns +> Iterator[Microsoft.AnalysisServices.Tabular.CalculationItem]; All calculation items within the semantic model. ### [all_columns](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_columns) +#### Outputs a list of all columns within all tables in the semantic model. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1664,7 +4630,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.all_columns() ``` +### Returns +> Iterator[Microsoft.AnalysisServices.Tabular.Column]; All columns within the semantic model. ### [all_date_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_date_tables) +#### Outputs the tables which are marked as [date tables](https://learn.microsoft.com/power-bi/transform-model/desktop-date-tables) within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1672,7 +4642,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.all_date_tables() ``` +### Returns +> Microsoft.AnalysisServices.Tabular.TableCollection; All tables marked as date tables within a semantic model. ### [all_hierarchies](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_hierarchies) +#### Outputs a list of all hierarchies in the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1680,7 +4654,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.all_hierarchies() ``` +### Returns +> Iterator[Microsoft.AnalysisServices.Tabular.Hierarchy]; All hierarchies within the semantic model. ### [all_hybrid_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_hybrid_tables) +#### Outputs the [hybrid tables](https://learn.microsoft.com/power-bi/connect-data/service-dataset-modes-understand#hybrid-tables) within a semantic model. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1688,7 +4666,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.all_hybrid_tables() ``` +### Returns +> Microsoft.AnalysisServices.Tabular.TableCollection; All hybrid tables within a semantic model. ### [all_levels](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_levels) +#### Outputs a list of all levels in the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1696,7 +4678,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.all_levels() ``` +### Returns +> Iterator[Microsoft.AnalysisServices.Tabular.Level]; All levels within the semantic model. ### [all_measures](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_measures) +#### Outputs a list of all measures in the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1704,7 +4690,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.all_measures() ``` +### Returns +> Iterator[Microsoft.AnalysisServices.Tabular.Measure]; All measures within the semantic model. ### [all_partitions](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_partitions) +#### Outputs a list of all partitions in the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1712,7 +4702,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.all_partitions() ``` +### Returns +> Iterator[Microsoft.AnalysisServices.Tabular.Partition]; All partitions within the semantic model. 
### [all_rls](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.all_rls) +#### Outputs a list of all row level security expressions in the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1720,7 +4714,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.all_rls() ``` +### Returns +> Iterator[Microsoft.AnalysisServices.Tabular.TablePermission]; All row level security expressions within the semantic model. ### [apply_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.apply_refresh_policy) +#### [Applies the incremental refresh](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.table.applyrefreshpolicy?view=analysisservices-dotnet#microsoft-analysisservices-tabular-table-applyrefreshpolicy(system-boolean-system-int32)) policy for a table within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1733,7 +4731,26 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **effective_date** (DateTime, default=None) +> +>> Optional; The effective date that is used when calculating the partitioning scheme. +> +> **refresh** (bool, default=True) +> +>> Optional; An indication if partitions of the table should be refreshed or not; the default behavior is to do the refresh. +> +> **max_parallelism** (int, default=0) +> +>> Optional; The degree of parallelism during the refresh execution. +> ### [cardinality](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.cardinality) +#### Obtains the cardinality of a column within a semantic model. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1743,7 +4760,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **column** (TOM Object) +> +>> Required; The column object within the semantic model. +> +### Returns +> int; Cardinality of the TOM column. ### [clear_annotations](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.clear_annotations) +#### Removes all [annotations](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.annotation?view=analysisservices-dotnet) on an object within the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1753,7 +4779,14 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> ### [clear_extended_properties](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.clear_extended_properties) +#### Removes all [extended properties](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.extendedproperty?view=analysisservices-dotnet) on an object within the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1763,7 +4796,14 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> ### [data_size](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.data_size) +#### Obtains the data size of a column within a semantic model. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1773,7 +4813,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **column** (TOM Object) +> +>> Required; The column object within the semantic model. +> +### Returns +> int; Data size of the TOM column. ### [depends_on](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.depends_on) +#### Obtains the objects on which the specified object depends. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1784,7 +4833,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; The TOM object within the semantic model. +> +> **dependencies** (pandas.DataFrame) +> +>> Required; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. +> +### Returns +> Microsoft.AnalysisServices.Tabular.TableCollection, Microsoft.AnalysisServices.Tabular.ColumnCollection, Microsoft.AnalysisServices.Tabular.MeasureCollection; Objects on which the specified object depends. ### [dictionary_size](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.dictionary_size) +#### Obtains the dictionary size of a column within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1794,7 +4856,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **column** (TOM Object) +> +>> Required; The column object within the semantic model. +> +### Returns +> int; Dictionary size of the TOM column. ### [fully_qualified_measures](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.fully_qualified_measures) +#### Obtains all fully qualified measure references for a given object. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1805,7 +4876,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; The TOM object within the semantic model. +> +> **dependencies** (pandas.DataFrame) +> +>> Required; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. +> +### Returns +> Microsoft.AnalysisServices.Tabular.MeasureCollection; All fully qualified measure references for a given object. ### [get_annotation_value](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.get_annotation_value) +#### Obtains the [annotation](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.annotation?view=analysisservices-dotnet) value for a given annotation on an object within the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1816,7 +4900,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> +> **name** (str) +> +>> Required; Name of the annotation. +> +### Returns +> str; The annotation value. ### [get_annotations](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.get_annotations) +#### Shows all [annotations](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.annotation?view=analysisservices-dotnet) for a given object within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1826,7 +4923,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. 
+> +### Returns +> Microsoft.AnalysisServices.Tabular.Annotation; TOM objects of all the annotations on a particular object within the semantic model. ### [get_extended_properties](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.get_extended_properties) +#### Retrieves all [extended properties](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.extendedproperty?view=analysisservices-dotnet) on an object within the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1836,7 +4942,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> +### Returns +> Microsoft.AnalysisServices.Tabular.ExtendedPropertiesCollection; TOM Objects of all the extended properties. ### [get_extended_property_value](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.get_extended_property_value) +#### Retrieves the value of an [extended property](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.extendedproperty?view=analysisservices-dotnet) for an object within the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1847,7 +4962,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> +> **name** (str) +> +>> Required; Name of the annotation. +> +### Returns +> str; The extended property value. 
### [has_aggs](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_aggs) +#### Identifies if a semantic model has any [aggregations](https://learn.microsoft.com/power-bi/transform-model/aggregations-advanced). + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1855,7 +4983,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.has_aggs() ``` +### Returns +> bool; Indicates if the semantic model has any aggregations. ### [has_date_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_date_table) +#### Identifies if a semantic model has a table marked as a [date table](https://learn.microsoft.com/power-bi/transform-model/desktop-date-tables). + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1863,7 +4995,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.has_date_table() ``` +### Returns +> bool; Indicates if the semantic model has a table marked as a date table. ### [has_hybrid_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_hybrid_table) +#### Identifies if a semantic model has a [hybrid table](https://learn.microsoft.com/power-bi/connect-data/service-dataset-modes-understand#hybrid-tables). + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1871,7 +5007,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.has_hybrid_table() ``` +### Returns +> bool; Indicates if the semantic model has a hybrid table. 
### [has_incremental_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_incremental_refresh_policy) +#### Identifies whether a table has an [incremental refresh](https://learn.microsoft.com/power-bi/connect-data/incremental-refresh-overview) policy. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1881,7 +5021,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +### Returns +> bool; An indicator whether a table has an incremental refresh policy. ### [in_perspective](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.in_perspective) +#### Indicates whether an object is contained within a given [perspective](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.perspective?view=analysisservices-dotnet). + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1892,7 +5041,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> +> **perspective_name** (str) +> +>> Required; Name of the perspective. +> +### Returns +> bool; An indication as to whether the object is contained within the given perspective. ### [is_agg_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_agg_table) +#### Identifies if a table has [aggregations](https://learn.microsoft.com/power-bi/transform-model/aggregations-advanced). 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1902,7 +5064,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +### Returns +> bool; Indicates if the table has any aggregations. ### [is_auto_date_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_auto_date_table) +#### Identifies if a table is an [auto date/time table](https://learn.microsoft.com/power-bi/transform-model/desktop-auto-date-time). + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1912,7 +5083,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +### Returns +> bool; Indicates if the table is an auto-date table. ### [is_calculated_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_calculated_table) +#### Identifies if a table is a calculated table. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1922,7 +5102,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +### Returns +> bool; A boolean value indicating whether the table is a calculated table. ### [is_date_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_date_table) +#### Identifies if a table is marked as a [date table](https://learn.microsoft.com/power-bi/transform-model/desktop-date-tables). + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1932,7 +5121,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. 
+> +### Returns +> bool; Indicates if the table is marked as a date table. ### [is_direct_lake](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_direct_lake) +#### Identifies if a semantic model is in [Direct Lake](https://learn.microsoft.com/fabric/get-started/direct-lake-overview) mode. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1940,7 +5138,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.is_direct_lake() ``` +### Returns +> bool; Indicates if the semantic model is in Direct Lake mode. ### [is_direct_lake_using_view](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_direct_lake_using_view) +#### Identifies whether a semantic model is in Direct lake mode and uses views from the lakehouse. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1948,7 +5150,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: tom.is_direct_lake_using_view() ``` +### Returns +> bool; An indicator whether a semantic model is in Direct lake mode and uses views from the lakehouse. ### [is_field_parameter](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_field_parameter) +#### Identifies if a table is a [field parameter](https://learn.microsoft.com/power-bi/create-reports/power-bi-field-parameters). + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1958,7 +5164,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +### Returns +> bool; Indicates if the table is a field parameter. 
### [is_hybrid_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_hybrid_table) +#### Identifies if a table is a [hybrid table](https://learn.microsoft.com/power-bi/connect-data/service-dataset-modes-understand#hybrid-tables). + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1968,7 +5183,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +### Returns +> bool; Indicates if the table is a hybrid table. ### [mark_as_date_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.mark_as_date_table) +#### Marks a table as a [date table](https://learn.microsoft.com/power-bi/transform-model/desktop-date-tables). + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1979,7 +5203,18 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **column_name** (str) +> +>> Required; Name of the date column in the table. +> ### [records_per_segment](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.records_per_segment) +#### Obtains the records per segment of a partition within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -1989,7 +5224,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; The partition object within the semantic model. +> +### Returns +> float; Number of records per segment within the partition. 
### [referenced_by](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.referenced_by) +#### Obtains the objects which reference the specified object. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2000,7 +5244,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; The TOM object within the semantic model. +> +> **dependencies** (pandas.DataFrame) +> +>> Required; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. +> +### Returns +> Microsoft.AnalysisServices.Tabular.TableCollection, Microsoft.AnalysisServices.Tabular.ColumnCollection, Microsoft.AnalysisServices.Tabular.MeasureCollection; Objects which reference the specified object. ### [remove_alternate_of](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_alternate_of) +#### Removes the [alternate of](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.alternateof?view=analysisservices-dotnet) property on a column. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2011,7 +5268,18 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **column_name** (str) +> +>> Required; Name of the column. +> ### [remove_annotation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_annotation) +#### Removes an [annotation](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.annotation?view=analysisservices-dotnet) on an object within the semantic model. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2022,7 +5290,18 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> +> **name** (str) +> +>> Required; Name of the annotation. +> ### [remove_extended_property](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_extended_property) +#### Removes an [extended property](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.extendedproperty?view=analysisservices-dotnet) on an object within the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2033,7 +5312,18 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> +> **name** (str) +> +>> Required; Name of the annotation. +> ### [remove_from_perspective](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_from_perspective) +#### Removes an object from a [perspective](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.perspective?view=analysisservices-dotnet). + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2044,7 +5334,18 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> +> **perspective_name** (str) +> +>> Required; Name of the perspective. +> ### [remove_object](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_object) +#### Removes an object from a semantic model. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2054,7 +5355,14 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> ### [remove_sort_by_column](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_sort_by_column) +#### Removes the sort by column for a column in a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2065,7 +5373,18 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **column_name** (str) +> +>> Required; Name of the column. +> ### [remove_translation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_translation) +#### Removes an object's [translation](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.culture?view=analysisservices-dotnet) value. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2076,7 +5395,17 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> +> **language** (str) +> +>> Required; The language code. +> ### [remove_vertipaq_annotations](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.remove_vertipaq_annotations) +#### Removes the annotations set using the set_vertipaq_annotations function. 
```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2085,6 +5414,8 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ``` ### [row_count](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.row_count) +#### Obtains the row count of a table or partition within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2094,7 +5425,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; The table/partition object within the semantic model. +> +### Returns +> int; Number of rows within the TOM object. ### [set_aggregations](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_aggregations) +#### Sets the [aggregations](https://learn.microsoft.com/power-bi/transform-model/aggregations-advanced) (alternate of) for all the columns in an aggregation table based on a base table. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2105,7 +5445,18 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the base table. +> +> **agg_table_name** (str) +> +>> Required; Name of the aggregation table. +> ### [set_alternate_of](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_alternate_of) +#### Sets the [alternate of](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.alternateof?view=analysisservices-dotnet) property on a column. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2119,7 +5470,31 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **column_name** (str) +> +>> Required; Name of the column. +> +> **summarization_type** (str) +> +>> Required; The summarization type for the column. +[Summarization valid values](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.alternateof.summarization?view=analysisservices-dotnet#microsoft-analysisservices-tabular-alternateof-summarization) +> +> **base_table** (str) +> +>> Required; Name of the base table for aggregation. +> +> **base_column** (str) +> +>> Optional; Name of the base column for aggregation +> ### [set_annotation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_annotation) +#### Sets an [annotation](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.annotation?view=analysisservices-dotnet) on an object within the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2131,7 +5506,22 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> +> **name** (str) +> +>> Required; Name of the annotation. +> +> **value** (str) +> +>> Required; Value of the annotation. +> ### [set_data_coverage_definition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_data_coverage_definition) +#### Sets the [data coverage definition](https://learn.microsoft.com/analysis-services/tom/table-partitions?view=asallproducts-allversions) for a partition. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2143,7 +5533,22 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **partition_name** (str) +> +>> Required; Name of the partition. +> +> **expression** (str) +> +>> Required; DAX expression containing the logic for the data coverage definition. +> ### [set_data_type](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_data_type) +#### Sets the [data type](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.datatype?view=analysisservices-dotnet) for a column. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2155,7 +5560,23 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **column_name** (str) +> +>> Required; Name of the column. +> +> **value** (str) +> +>> Required; The data type. +[Data type valid values](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.datatype?view=analysisservices-dotnet) +> ### [set_direct_lake_behavior](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_direct_lake_behavior) +#### Sets the [Direct Lake Behavior](https://learn.microsoft.com/fabric/get-started/direct-lake-overview#fallback-behavior) property for a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2165,7 +5586,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **direct_lake_behavior** (str) +> +>> Required; The DirectLakeBehavior property value. 
+[DirectLakeBehavior valid values](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.directlakebehavior?view=analysisservices-dotnet) +> ### [set_encoding_hint](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_encoding_hint) +#### Sets the [encoding hint](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.encodinghinttype?view=analysisservices-dotnet) for a column. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2177,7 +5606,23 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **column_name** (str) +> +>> Required; Name of the column. +> +> **value** (str) +> +>> Required; Encoding hint value. +[Encoding hint valid values](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.encodinghinttype?view=analysisservices-dotnet) +> ### [set_extended_property](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_extended_property) +#### Sets an [extended property](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.extendedproperty?view=analysisservices-dotnet) on an object within the semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2190,7 +5635,27 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> +> **extended_property_type** (str) +> +>> Required; The extended property type. +[Extended property valid values](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.extendedpropertytype?view=analysisservices-dotnet) +> +> **name** (str) +> +>> Required; Name of the extended property. 
+> +> **value** (str) +> +>> Required; Value of the extended property. +> ### [set_is_available_in_mdx](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_is_available_in_mdx) +#### Sets the [IsAvailableInMDX](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.column.isavailableinmdx?view=analysisservices-dotnet#microsoft-analysisservices-tabular-column-isavailableinmdx) property on a column. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2202,7 +5667,22 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **column_name** (str) +> +>> Required; Name of the column. +> +> **value** (bool, default=False) +> +>> Optional; The IsAvailableInMdx property value. +> ### [set_kpi](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_kpi) +#### Sets the properties to add/update a [KPI](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.kpi?view=analysisservices-dotnet) for a measure. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2219,7 +5699,44 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **measure_name** (str) +> +>> Required; Name of the measure. +> +> **target** (str, int, float) +> +>> Required; The target for the KPI. This can either be a number or the name of a different measure in the semantic model. +> +> **lower_bound** (float) +> +>> Required; The lower bound for the KPI. +> +> **upper_bound** (float) +> +>> Required; The upper bound for the KPI. +> +> **lower_mid_bound** (float, default=None) +> +>> Optional; The lower-mid bound for the KPI. Set this if status_type is 'Centered' or 'CenteredReversed'. 
+> +> **upper_mid_bound** (float, default=None) +> +>> Optional; The upper-mid bound for the KPI. Set this if status_type is 'Centered' or 'CenteredReversed'. +> +> **status_type** (str, default=None) +> +>> Optional; The status type of the KPI. Options: 'Linear', 'LinearReversed', 'Centered', 'CenteredReversed'. +Defaults to None which resolves to 'Linear'. +> +> **status_graphic** (str, default=None) +> +>> Optional; The status graphic for the KPI. +Defaults to 'Three Circles Colored'. +> ### [set_ols](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_ols) +#### Sets the object level security permissions for a column within a role. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2232,7 +5749,27 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **role_name** (str) +> +>> Required; Name of the role. +> +> **table_name** (str) +> +>> Required; Name of the table. +> +> **column_name** (str) +> +>> Required; Name of the column. +> +> **permission** (str) +> +>> Required; The object level security permission for the column. +[Permission valid values](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.metadatapermission?view=analysisservices-dotnet) +> ### [set_rls](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_rls) +#### Sets the row level security permissions for a table within a role. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2244,7 +5781,22 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **role_name** (str) +> +>> Required; Name of the role. +> +> **table_name** (str) +> +>> Required; Name of the table. +> +> **filter_expression** (str) +> +>> Required; The DAX expression containing the row level security filter expression logic. 
+> ### [set_sort_by_column](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_sort_by_column) +#### Sets the sort by column for a column in a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2256,7 +5808,22 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **column_name** (str) +> +>> Required; Name of the column. +> +> **sort_by_column** (str) +> +>> Required; Name of the column to use for sorting. Must be of integer (Int64) data type. +> ### [set_summarize_by](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_summarize_by) +#### Sets the [SummarizeBy](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.column.summarizeby?view=analysisservices-dotnet#microsoft-analysisservices-tabular-column-summarizeby) property on a column. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2268,7 +5835,24 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **column_name** (str) +> +>> Required; Name of the column. +> +> **value** (bool, default=None) +> +>> Optional; The SummarizeBy property value. +Defaults to none which resolves to 'Default'. +[Aggregate valid values](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.aggregatefunction?view=analysisservices-dotnet) +> ### [set_translation](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_translation) +#### Sets a [translation](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.culture?view=analysisservices-dotnet) value for an object's property. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2281,7 +5865,25 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column/measure) within a semantic model. +> +> **language** (str) +> +>> Required; The language code. +> +> **property** (str) +> +>> Required; The property to set. Options: 'Name', 'Description', 'Display Folder'. +> +> **value** (str) +> +>> Required; The translation value. +> ### [set_vertipaq_annotations](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.set_vertipaq_annotations) +#### Saves Vertipaq Analyzer statistics as annotations on objects in the semantic model. ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2290,6 +5892,8 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ``` ### [show_incremental_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.show_incremental_refresh_policy) +#### Prints the [incremental refresh](https://learn.microsoft.com/power-bi/connect-data/incremental-refresh-overview) policy for a table. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2299,7 +5903,14 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> ### [total_size](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.total_size) +#### Obtains the data size of a table/column within a semantic model. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2309,7 +5920,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; The table/column object within the semantic model. +> +### Returns +> int; Total size of the TOM table/column. ### [unqualified_columns](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.unqualified_columns) +#### Obtains all unqualified column references for a given object. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2320,7 +5940,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; The TOM object within the semantic model. +> +> **dependencies** (pandas.DataFrame) +> +>> Required; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. +> +### Returns +> Microsoft.AnalysisServices.Tabular.ColumnCollection; All unqualified column references for a given object. ### [update_calculation_item](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.update_calculation_item) +#### Updates a calculation item within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2335,7 +5968,38 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the calculation group (table). +> +> **calculation_item_name** (str) +> +>> Required; Name of the calculation item. +> +> **expression** (str, default=None) +> +>> Optional; The DAX expression of the calculation item. +Defaults to None which keeps the existing setting. +> +> **ordinal** (int, default=None) +> +>> Optional; The ordinal of the calculation item. 
+Defaults to None which keeps the existing setting. +> +> **description** (str, default=None) +> +>> Optional; The description of the role. +Defaults to None which keeps the existing setting. +> +> **format_string_expression** (str, default=None) +> +>> Optional; The format string expression for the calculation item. +Defaults to None which keeps the existing setting. +> ### [update_column](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.update_column) +#### Updates a column within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2356,7 +6020,68 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table in which the column exists. +> +> **column_name** (str) +> +>> Required; Name of the column. +> +> **source_column** (str, default=None) +> +>> Optional; The source column for the column (for data columns only). +Defaults to None which keeps the existing setting. +> +> **data_type** (str, default=None) +> +>> Optional; The data type of the column. +Defaults to None which keeps the existing setting. +> +> **expression** (str, default=None) +> +>> Optional; The DAX expression of the column (for calculated columns only). +Defaults to None which keeps the existing setting. +> +> **format_string** (str, default=None) +> +>> Optional; Format string of the column. +Defaults to None which keeps the existing setting. +> +> **hidden** (bool, default=None) +> +>> Optional; Whether the column will be hidden or visible. +Defaults to None which keeps the existing setting. +> +> **description** (str, default=None) +> +>> Optional; A description of the column. +Defaults to None which keeps the existing setting. +> +> **display_folder** (str, default=None) +> +>> Optional; The display folder in which the column will reside. 
+Defaults to None which keeps the existing setting. +> +> **data_category** (str, default=None) +> +>> Optional; The data category of the column. +Defaults to None which keeps the existing setting. +> +> **key** (bool, default=False) +> +>> Optional; Marks the column as the primary key of the table. +Defaults to None which keeps the existing setting. +> +> **summarize_by** (str, default=None) +> +>> Optional; Sets the value for the Summarize By property of the column. +Defaults to None which keeps the existing setting. +> ### [update_incremental_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.update_incremental_refresh_policy) +#### Updates the [incremental refresh](https://learn.microsoft.com/power-bi/connect-data/incremental-refresh-overview) policy for a table within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2372,7 +6097,39 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **incremental_granularity** (str) +> +>> Required; Granularity of the (most recent) incremental refresh range. +> +> **incremental_periods** (int) +> +>> Required; Number of periods for the incremental refresh range. +> +> **rolling_window_granularity** (str) +> +>> Required; Target granularity of the rolling window for the whole semantic model. +> +> **rolling_window_periods** (int) +> +>> Required; Number of periods for the rolling window for the whole semantic model. +> +> **only_refresh_complete_days** (bool, default=False) +> +>> Optional; Lag or leading periods from Now() to the rolling window head. +> +> **detect_data_changes_column** (str, default=None) +> +>> Optional; The column to use for detecting data changes. +Defaults to None which resolves to not detecting data changes. 
+> ### [update_m_partition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.update_m_partition) +#### Updates an M partition for a table within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2386,7 +6143,33 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **table_name** (str) +> +>> Required; Name of the table. +> +> **partition_name** (str) +> +>> Required; Name of the partition. +> +> **expression** (str, default=None) +> +>> Optional; The [M expression](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.mpartitionsource.expression?view=analysisservices-dotnet) containing the logic for the partition. +Defaults to None which keeps the existing setting. +> +> **mode** (str, default=None) +> +>> Optional; The query [mode](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.modetype?view=analysisservices-dotnet) of the partition. +Defaults to None which keeps the existing setting. +> +> **description** (str, default=None) +> +>> Optional; The description of the partition. +Defaults to None which keeps the existing setting. +> ### [update_measure](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.update_measure) +#### Updates a measure within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2402,7 +6185,44 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **measure_name** (str) +> +>> Required; Name of the measure. +> +> **expression** (str, default=None) +> +>> Optional; DAX expression of the measure. +Defaults to None which keeps the existing setting. +> +> **format_string** (str, default=None) +> +>> Optional; Format string of the measure. +Defaults to None which keeps the existing setting. 
+> +> **hidden** (bool, default=None) +> +>> Optional; Whether the measure will be hidden or visible. +Defaults to None which keeps the existing setting. +> +> **description** (str, default=None) +> +>> Optional; A description of the measure. +Defaults to None which keeps the existing setting. +> +> **display_folder** (str, default=None) +> +>> Optional; The display folder in which the measure will reside. +Defaults to None which keeps the existing setting. +> +> **format_string_expression** (str, default=None) +> +>> Optional; The format string expression for the calculation item. +Defaults to None which keeps the existing setting. +> ### [update_role](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.update_role) +#### Updates a role within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2414,7 +6234,24 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **role_name** (str) +> +>> Required; Name of the role. +> +> **model_permission** (str, default=None) +> +>> Optional; The model permission for the role. +Defaults to None which keeps the existing setting. +> +> **description** (str, default=None) +> +>> Optional; The description of the role. +Defaults to None which keeps the existing setting. +> ### [used_in_calc_item](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_in_calc_item) +#### Identifies the ... which reference a given object. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2425,7 +6262,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column) within a semantic model. 
+> +> **dependencies** (pandas.DataFrame) +> +>> Required; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. +> +### Returns +> Microsoft.AnalysisServices.Tabular.TableCollection, Microsoft.AnalysisServices.Tabular.ColumnCollection, Microsoft.AnalysisServices.Tabular.MeasureCollection; None ### [used_in_data_coverage_definition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_in_data_coverage_definition) +#### Identifies the ... which reference a given object. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2436,7 +6286,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column) within a semantic model. +> +> **dependencies** (pandas.DataFrame) +> +>> Required; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. +> +### Returns +> Microsoft.AnalysisServices.Tabular.TableCollection, Microsoft.AnalysisServices.Tabular.ColumnCollection, Microsoft.AnalysisServices.Tabular.MeasureCollection; None ### [used_in_hierarchies](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_in_hierarchies) +#### Shows all [hierarchies](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.hierarchy?view=analysisservices-dotnet) in which a column is used. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2446,7 +6309,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An column object within a semantic model. +> +### Returns +> Microsoft.AnalysisServices.Tabular.HierarchyCollection; All hierarchies in which the column is used. 
### [used_in_levels](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_in_levels) +#### Shows all [levels](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.level?view=analysisservices-dotnet) in which a column is used. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2456,7 +6328,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An column object within a semantic model. +> +### Returns +> Microsoft.AnalysisServices.Tabular.LevelCollection; All levels in which the column is used. ### [used_in_relationships](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_in_relationships) +#### Shows all [relationships](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.singlecolumnrelationship?view=analysisservices-dotnet) in which a table/column is used. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2466,7 +6347,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column) within a semantic model. +> +### Returns +> Microsoft.AnalysisServices.Tabular.RelationshipCollection; All relationships in which the table/column is used. ### [used_in_rls](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_in_rls) +#### Identifies the row level security [filter expressions](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.tablepermission.filterexpression?view=analysisservices-dotnet#microsoft-analysisservices-tabular-tablepermission-filterexpression) which reference a given object. 
+ ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2477,7 +6367,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; An object (i.e. table/column) within a semantic model. +> +> **dependencies** (pandas.DataFrame) +> +>> Required; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. +> +### Returns +> Microsoft.AnalysisServices.Tabular.TableCollection, Microsoft.AnalysisServices.Tabular.ColumnCollection, Microsoft.AnalysisServices.Tabular.MeasureCollection; None ### [used_in_sort_by](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_in_sort_by) +#### Shows all columns in which a column is used for sorting. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2487,7 +6390,16 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: ) ``` +### Parameters +> **object** (TOM Object) +> +>> Required; A column object within a semantic model. +> +### Returns +> Microsoft.AnalysisServices.Tabular.ColumnCollection; All columns in which the column is used for sorting. ### [used_size](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.used_size) +#### Obtains the used size of a hierarchy or relationship within a semantic model. + ```python import sempy_labs as labs from sempy_labs.tom import connect_semantic_model @@ -2496,3 +6408,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: object=tom.model.Tables["Geography"].Hierarchies["Geo Hierarchy"], ) ``` + +### Parameters +> **object** (TOM Object) +> +>> Required; The hierarchy/relationship object within the semantic model. +> +### Returns +> int; Used size of the TOM object. 
\ No newline at end of file From 5dba9147233fb8d2e9f5426f67f6ed6ee4d216d6 Mon Sep 17 00:00:00 2001 From: Michael Date: Fri, 6 Sep 2024 10:34:39 +0300 Subject: [PATCH 05/18] updates per comments --- .github/workflows/_generate_func_examples.py | 198 +- function_examples.md | 3201 +++++++---------- .../_show_unsupported_directlake_objects.py | 3 +- 3 files changed, 1424 insertions(+), 1978 deletions(-) diff --git a/.github/workflows/_generate_func_examples.py b/.github/workflows/_generate_func_examples.py index a7882fbe..dbdaf505 100644 --- a/.github/workflows/_generate_func_examples.py +++ b/.github/workflows/_generate_func_examples.py @@ -1,5 +1,4 @@ import inspect -import os import re from docstring_parser import parse import sempy_labs @@ -18,105 +17,118 @@ TOMWrapper: 'tom', } +# Data type mapping +data_type_link_prefix = "" +data_type_map = { + "str": "https://docs.python.org/3/library/stdtypes.html#str", + "list": "https://docs.python.org/3/library/stdtypes.html#list", + "bool": "https://docs.python.org/3/library/stdtypes.html#bool", + "dict": "https://docs.python.org/3/library/typing.html#typing.Dict", + "pandas.DataFrame": "http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame", +} + +data_types = list(data_type_map.keys()) +pattern_type = r'(' + '|'.join(re.escape(dt) for dt in data_types) + r')' + + +def replace_data_type(match): + data_type = match.group(1) # Extract the matched data type + if data_type in data_type_map: + # Build the full link + return f'[{data_type}]({data_type_map[data_type]})' + return match.group(0) # If no match, return the original string + + link_prefix = "https://semantic-link-labs.readthedocs.io/en/stable/" tab = ' ' skip_functions = ['connect_semantic_model', '__init__', 'close'] -pattern = r'`([A-Za-z ]+) <(https?://[^\s]+)>`_' +pattern_desc = r'`([A-Za-z ]+) <(https?://[^\s]+)>`_' +default_values = { + 'dataset': "AdvWorks", + 'email_address': 'hello@goodbye.com', + 'user_name': 
'hello@goodbye.com', + 'languages': ['it-IT', 'zh-CN'], + 'dax_query': 'EVALUATE SUMMARIZECOLUMNS("MyMeasure", 1)', + 'column': 'tom.model.Tables["Geography"].Columns["GeographyKey"]', + 'dependencies': 'labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace)', +} + + +def format_link(d_alias, d_name, attr_name): + return f"{link_prefix}sempy_labs.{d_alias}.html#sempy_labs.{d_alias}.{d_name}.{attr_name}" if d_alias == 'tom' else f"{link_prefix}{d_name}.html#{d_name}.{attr_name}" + + +def create_signature(attr_name, sig, d_alias): + func_print = f"{d_alias}.{attr_name}(" + params = [ + f"{param_name}={default_values.get(param_name, param.default) if param.default != inspect.Parameter.empty else ''}" + for param_name, param in sig.parameters.items() if param_name not in ['kwargs', 'self'] + ] + return func_print + ', '.join(params) + ")" + + +def format_docstring_description(description): + return re.sub(pattern_desc, r'[\1](\2)', str(description)) + markdown_example = '## Function Examples\n' -# Function Examples + +# Gather necessary ingredients into a dictionary +func_dict = {} for d, d_alias in dirs.items(): - d_name = d.__name__ + d_name = d.__name__ for attr_name in dir(d): attr = getattr(d, attr_name) - if inspect.isfunction(attr): - if attr_name not in skip_functions: - docstring = parse(attr.__doc__) - link = f"{link_prefix}{d_name}.html#{d_name}.{attr_name}" - if d_alias == 'tom': - link = f"{link_prefix}sempy_labs.{d_alias}.html#sempy_labs.{d_alias}.{d_name}.{attr_name}" - sig = inspect.signature(attr) - markdown_example += f"\n### [{attr_name}]({link})" - attr_description = docstring.description - attr_description = re.sub(pattern, r'[\1](\2)', str(attr_description)) - markdown_example += f"\n#### {attr_description}" - markdown_example += "\n```python" - markdown_example += "\nimport sempy_labs as labs" - if d_alias == 'tom': - markdown_example += "\nfrom sempy_labs.tom import connect_semantic_model" - tf = 'True' - 
markdown_example += f"\nwith connect_semantic_model(dataset='', workspace='', readonly={tf}) as tom:" - elif d_alias != 'labs': - markdown_example += f"\nimport {d_name} as {d_alias}" - func_print = f"{d_alias}.{attr_name}(" - if d_alias == 'tom': - markdown_example += f"\n{tab}{func_print}" - else: - markdown_example += f"\n{func_print}" - params = [param for param_name, param in sig.parameters.items() if param_name not in ['kwargs', 'self']] - param_count = len(params) - for param_name, param in sig.parameters.items(): - is_optional = False - if param_name not in ['kwargs', 'self']: - param_value = '' - if param.default != inspect.Parameter.empty: - param_value = param.default - is_optional = True - elif param_name == 'dataset': - param_value = "AdvWorks" - elif param_name in ['email_address', 'user_name']: - param_value = 'hello@goodbye.com' - elif param_name == 'languages': - param_value = ['it-IT', 'zh-CN'] - elif param_name == 'dax_query': - param_value = 'EVALUATE SUMMARIZECOLUMNS("MyMeasure", 1)' - elif param_name == 'column': - param_value = 'tom.model.Tables["Geography"].Columns["GeographyKey"]' - elif param_name in ['object']: - if attr_name in ['row_count', 'total_size', 'used_in_relationships', 'used_in_rls', 'set_translation']: - param_value = 'tom.model.Tables["Sales"]' - elif attr_name in ['records_per_segment']: - param_value = 'tom.model.Tables["Sales"].Partitions["Sales"]' - elif attr_name in ['used_size']: - param_value = 'tom.model.Tables["Geography"].Hierarchies["Geo Hierarchy"]' - elif attr_name in ['fully_qualified_measures']: - param_value = 'tom.model.Tables["Sales"].Measures["Sales Amount"]' - elif param_name == 'dependencies': - param_value = 'labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace)' - - if param_value not in [None, True, False] and not isinstance(param_value, list) and param_name not in ['object', 'column', 'dependencies']: - param_value = f"'{param_value}'" - p = 
f"{tab}{param_name}={param_value}," - if is_optional: - p += " # This parameter is optional" - if d_alias == 'tom': - markdown_example += f"\n{tab}{p}" - else: - markdown_example += f"\n{p}" - closing = ")\n```\n" - if param_count == 0: - markdown_example += closing - else: - markdown_example += f"\n{closing}" - - if docstring.params: - markdown_example += "\n### Parameters" - for p in docstring.params: - p_description = re.sub(pattern, r'[\1](\2)', str(p.description)) - for param_name, param in sig.parameters.items(): - if param_name == p.arg_name: - if param.default != inspect.Parameter.empty: - req = 'Optional' - else: - req = 'Required' - param_value = param.default - - markdown_example += f"\n> **{p.arg_name}** ({p.type_name})\n>\n>> {req}; {p_description}\n>" - if docstring.returns: - ret = docstring.returns - markdown_example += '\n### Returns' - markdown_example += f"\n> {ret.type_name}; {ret.description}" - -output_path = os.path.join('/root/semantic-link-labs', 'function_examples.md') + if inspect.isfunction(attr) and attr_name not in skip_functions: + func_dict[attr_name] = { + 'attr': attr, + 'directory': d_name, + 'directory_alias': d_alias, + } + +for attr_name, attr_info in func_dict.items(): + attr = attr_info['attr'] + d_name = attr_info['directory'] + d_alias = attr_info['directory_alias'] + + docstring = parse(attr.__doc__) + sig = inspect.signature(attr) + link = format_link(d_alias, d_name, attr_name) + description = format_docstring_description(docstring.description) + + # Add Function name with link and description + markdown_example += f"\n### [{attr_name}]({link})\n#### {description}" + # Add Example Section + markdown_example += "\n```python\nimport sempy_labs as labs" + + if d_alias == 'tom': + markdown_example += "\nfrom sempy_labs.tom import connect_semantic_model\nwith connect_semantic_model(dataset='', workspace='', readonly=True) as tom:" + + markdown_example += f"\n{create_signature(attr_name, sig, d_alias)}\n```\n" + + # Add 
Parameters Section + if docstring.params: + markdown_example += "\n### Parameters" + for param_name, p in sig.parameters.items(): + ind = str(p).find(':')+1 + p_type = str(p)[ind:].lstrip() + param_type = re.sub(pattern_type, replace_data_type, p_type) + + req = 'Optional' if p.default != inspect.Parameter.empty else 'Required' + p_description = next((param.description for param in docstring.params if param.arg_name == param_name), None) + p_description = format_docstring_description(p_description) + + markdown_example += f"\n> **{param_name}** ({param_type})\n>\n>> {req}; {p_description}\n>" + + # Add Returns Section + if docstring.returns: + ret = docstring.returns + ret_type = ret.type_name + return_type = re.sub(pattern_type, replace_data_type, ret_type) + + markdown_example += f"\n### Returns\n> {return_type}; {ret.description}" + +# Write to file +output_path = '/root/semantic-link-labs/function_examples.md' with open(output_path, 'w') as f: f.write(markdown_example) diff --git a/function_examples.md b/function_examples.md index f8ef0ac2..53eb869d 100644 --- a/function_examples.md +++ b/function_examples.md @@ -5,28 +5,23 @@ ```python import sempy_labs as labs -labs.add_user_to_workspace( - email_address='hello@goodbye.com', - role_name='', - principal_type='User', # This parameter is optional - workspace=None, # This parameter is optional -) +labs.add_user_to_workspace(email_address=, role_name=, principal_type=User, workspace=None) ``` ### Parameters -> **email_address** (str) +> **email_address** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The email address of the user. > -> **role_name** (str) +> **role_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The [role](https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#groupuseraccessright) of the user within the workspace. 
> -> **principal_type** (str, default='User') +> **principal_type** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = 'User') > >> Optional; The [principal type](https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#principaltype). > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the workspace. Defaults to None which resolves to the workspace of the attached lakehouse @@ -37,18 +32,15 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.assign_workspace_to_capacity( - capacity_name='', - workspace=None, # This parameter is optional -) +labs.assign_workspace_to_capacity(capacity_name=, workspace=None) ``` ### Parameters -> **capacity_name** (str) +> **capacity_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the capacity. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace. Defaults to None which resolves to the workspace of the attached lakehouse @@ -59,18 +51,15 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.assign_workspace_to_dataflow_storage( - dataflow_storage_account='', - workspace=None, # This parameter is optional -) +labs.assign_workspace_to_dataflow_storage(dataflow_storage_account=, workspace=None) ``` ### Parameters -> **dataflow_storage_account** (str) +> **dataflow_storage_account** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the dataflow storage account. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the workspace. 
Defaults to None which resolves to the workspace of the attached lakehouse @@ -81,36 +70,30 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.backup_semantic_model( - dataset='AdvWorks', - file_path='', - allow_overwrite=True, # This parameter is optional - apply_compression=True, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.backup_semantic_model(dataset=, file_path=, allow_overwrite=True, apply_compression=True, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **file_path** (str) +> **file_path** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The ADLS Gen2 storage account location in which to backup the semantic model. Always saves within the 'power-bi-backup/' folder. Must end in '.abf'. Example 1: file_path = 'MyModel.abf' Example 2: file_path = 'MyFolder/MyModel.abf' > -> **allow_overwrite** (bool, default=True) +> **allow_overwrite** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = True) > >> Optional; If True, overwrites backup files of the same name. If False, the file you are saving cannot have the same name as a file that already exists in the same location. > -> **apply_compression** (bool, default=True) +> **apply_compression** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = True) > >> Optional; If True, compresses the backup file. Compressed backup files save disk space, but require slightly higher CPU utilization. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. 
Defaults to None which resolves to the workspace of the attached lakehouse @@ -121,24 +104,20 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.cancel_dataset_refresh( - dataset='AdvWorks', - request_id=None, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.cancel_dataset_refresh(dataset=, request_id=None, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **request_id** (str, default=None) +> **request_id** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The request id of a semantic model refresh. Defaults to finding the latest active refresh of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -149,18 +128,15 @@ or if no lakehouse attached, resolves to the workspace of the notebook. See [here](https://learn.microsoft.com/analysis-services/instances/clear-the-analysis-services-caches?view=asallproducts-allversions) for documentation. ```python import sempy_labs as labs -labs.clear_cache( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +labs.clear_cache(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. 
Defaults to None which resolves to the workspace of the attached lakehouse @@ -171,24 +147,20 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.commit_to_git( - comment='', - item_ids=None, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.commit_to_git(comment=, item_ids=None, workspace=None) ``` ### Parameters -> **comment** (str) +> **comment** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The Git commit comment. > -> **item_ids** (str | List[str], default=None) +> **item_ids** (Union[[str](https://docs.python.org/3/library/stdtypes.html#str), List[[str](https://docs.python.org/3/library/stdtypes.html#str)]] = None) > >> Optional; A list of item Ids to commit to Git. Defaults to None which commits all items to Git. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -199,43 +171,35 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.connect_workspace_to_git( - organization_name='', - project_name='', - repository_name='', - branch_name='', - directory_name='', - git_provider_type='AzureDevOps', # This parameter is optional - workspace=None, # This parameter is optional -) +labs.connect_workspace_to_git(organization_name=, project_name=, repository_name=, branch_name=, directory_name=, git_provider_type=AzureDevOps, workspace=None) ``` ### Parameters -> **organization_name** (str) +> **organization_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The organization name. > -> **project_name** (str) +> **project_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The project name. 
> -> **repository_name** (str) +> **repository_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The repository name. > -> **branch_name** (str) +> **branch_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The branch name. > -> **directory_name** (str) +> **directory_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The directory name. > -> **git_provider_type** (str, default="AzureDevOps") +> **git_provider_type** ([str](https://docs.python.org/3/library/stdtypes.html#str) = 'AzureDevOps') > >> Optional; A [Git provider type](https://learn.microsoft.com/rest/api/fabric/core/git/connect?tabs=HTTP#gitprovidertype). > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -250,53 +214,43 @@ Requirements: 3. Must save the Account Key from the Azure storage account as a secret within Azure Key Vault. ```python import sempy_labs as labs -labs.copy_semantic_model_backup_file( - source_workspace='', - target_workspace='', - source_file_name='', - target_file_name='', - storage_account_url='', - key_vault_uri='', - key_vault_account_key='', - source_file_system='power-bi-backup', # This parameter is optional - target_file_system='power-bi-backup', # This parameter is optional -) +labs.copy_semantic_model_backup_file(source_workspace=, target_workspace=, source_file_name=, target_file_name=, storage_account_url=, key_vault_uri=, key_vault_account_key=, source_file_system=power-bi-backup, target_file_system=power-bi-backup) ``` ### Parameters -> **source_workspace** (str) +> **source_workspace** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The workspace name of the source semantic model backup file. 
> -> **target_workspace** (str) +> **target_workspace** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The workspace name of the target semantic model backup file destination. > -> **source_file_name** (str) +> **source_file_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the source backup file (i.e. MyModel.abf). > -> **target_file_name** (str) +> **target_file_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the target backup file (i.e. MyModel.abf). > -> **storage_account_url** (str) +> **storage_account_url** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The URL of the storage account. To find this, navigate to the storage account within the Azure Portal. Within 'Endpoints', see the value for the 'Primary Endpoint'. > -> **key_vault_uri** (str) +> **key_vault_uri** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The URI of the Azure Key Vault account. > -> **key_vault_account_key** (str) +> **key_vault_account_key** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The key vault secret name which contains the account key of the Azure storage account. > -> **source_file_system** (str, default="power-bi-backup") +> **source_file_system** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = 'power-bi-backup') > >> Optional; The container in which the source backup file is located. > -> **target_file_system** (str, default="power-bi-backup") +> **target_file_system** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = 'power-bi-backup') > >> Optional; The container in which the target backup file will be saved. 
> @@ -305,50 +259,42 @@ labs.copy_semantic_model_backup_file( ```python import sempy_labs as labs -labs.create_abfss_path( - lakehouse_id='', - lakehouse_workspace_id='', - delta_table_name='', -) +labs.create_abfss_path(lakehouse_id=, lakehouse_workspace_id=, delta_table_name=) ``` ### Parameters -> **lakehouse_id** (UUID) +> **lakehouse_id** (uuid.UUID) > >> Required; ID of the Fabric lakehouse. > -> **lakehouse_workspace_id** (UUID) +> **lakehouse_workspace_id** (uuid.UUID) > >> Required; ID of the Fabric workspace. > -> **delta_table_name** (str) +> **delta_table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the delta table name. > ### Returns -> str; An abfss path which can be used to save/reference a delta table in a Fabric lakehouse. +> [str](https://docs.python.org/3/library/stdtypes.html#str); An abfss path which can be used to save/reference a delta table in a Fabric lakehouse. ### [create_blank_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_blank_semantic_model) #### Creates a new blank semantic model (no tables/columns etc.). ```python import sempy_labs as labs -labs.create_blank_semantic_model( - dataset='AdvWorks', - compatibility_level='1605', # This parameter is optional - workspace=None, # This parameter is optional -) +labs.create_blank_semantic_model(dataset=, compatibility_level=1605, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **compatibility_level** (int, default=1605) +> **compatibility_level** (Optional[int] = 1605) > >> Optional; The compatibility level of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. 
Defaults to None which resolves to the workspace of the attached lakehouse @@ -359,26 +305,15 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.create_custom_pool( - pool_name='', - node_size='', - min_node_count='', - max_node_count='', - min_executors='', - max_executors='', - node_family='MemoryOptimized', # This parameter is optional - auto_scale_enabled=True, # This parameter is optional - dynamic_executor_allocation_enabled=True, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.create_custom_pool(pool_name=, node_size=, min_node_count=, max_node_count=, min_executors=, max_executors=, node_family=MemoryOptimized, auto_scale_enabled=True, dynamic_executor_allocation_enabled=True, workspace=None) ``` ### Parameters -> **pool_name** (str) +> **pool_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The custom pool name. > -> **node_size** (str) +> **node_size** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The [node size](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#nodesize). > @@ -398,19 +333,19 @@ labs.create_custom_pool( > >> Required; The [maximum executors](https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties). > -> **node_family** (str, default='MemoryOptimized') +> **node_family** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = 'MemoryOptimized') > >> Optional; The [node family](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#nodefamily). 
> -> **auto_scale_enabled** (bool, default=True) +> **auto_scale_enabled** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = True) > >> Optional; The status of [auto scale](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties). > -> **dynamic_executor_allocation_enabled** (bool, default=True) +> **dynamic_executor_allocation_enabled** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = True) > >> Optional; The status of the [dynamic executor allocation](https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties). > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace. Defaults to None which resolves to the workspace of the attached lakehouse @@ -424,24 +359,20 @@ on multiple workspaces at once (and over time). The semantic model is always created within the same workspace as the lakehouse. ```python import sempy_labs as labs -labs.create_model_bpa_semantic_model( - dataset='ModelBPA', # This parameter is optional - lakehouse=None, # This parameter is optional - lakehouse_workspace=None, # This parameter is optional -) +labs.create_model_bpa_semantic_model(dataset=AdvWorks, lakehouse=None, lakehouse_workspace=None) ``` ### Parameters -> **dataset** (str, default='ModelBPA') +> **dataset** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = 'ModelBPA') > >> Optional; Name of the semantic model to be created. > -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Name of the Fabric lakehouse which contains the 'modelbparesults' delta table. Defaults to None which resolves to the default lakehouse attached to the notebook. 
> -> **lakehouse_workspace** (str, default=None) +> **lakehouse_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The workspace in which the lakehouse resides. Defaults to None which resolves to the workspace of the attached lakehouse @@ -452,55 +383,46 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.create_relationship_name( - from_table='', - from_column='', - to_table='', - to_column='', -) +labs.create_relationship_name(from_table=, from_column=, to_table=, to_column=) ``` ### Parameters -> **from_table** (str) +> **from_table** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the table on the 'from' side of the relationship. > -> **from_column** (str) +> **from_column** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the column on the 'from' side of the relationship. > -> **to_table** (str) +> **to_table** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the table on the 'to' side of the relationship. > -> **to_column** (str) +> **to_column** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the column on the 'to' side of the relationship. > ### Returns -> str; The fully qualified relationship name. +> [str](https://docs.python.org/3/library/stdtypes.html#str); The fully qualified relationship name. ### [create_semantic_model_from_bim](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_semantic_model_from_bim) #### Creates a new semantic model based on a Model.bim file. 
```python import sempy_labs as labs -labs.create_semantic_model_from_bim( - dataset='AdvWorks', - bim_file='', - workspace=None, # This parameter is optional -) +labs.create_semantic_model_from_bim(dataset=, bim_file=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **bim_file** (dict) +> **bim_file** ([dict](https://docs.python.org/3/library/typing.html#typing.Dict)) > >> Required; The model.bim file. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -511,23 +433,19 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.create_warehouse( - warehouse='', - description=None, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.create_warehouse(warehouse=, description=None, workspace=None) ``` ### Parameters -> **warehouse** (str) +> **warehouse** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the warehouse. > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description of the warehouse. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -538,18 +456,15 @@ or if no lakehouse attached, resolves to the workspace of the notebook. 
```python import sempy_labs as labs -labs.delete_custom_pool( - pool_name='', - workspace=None, # This parameter is optional -) +labs.delete_custom_pool(pool_name=, workspace=None) ``` ### Parameters -> **pool_name** (str) +> **pool_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The custom pool name. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace. Defaults to None which resolves to the workspace of the attached lakehouse @@ -560,18 +475,15 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.delete_user_from_workspace( - email_address='hello@goodbye.com', - workspace=None, # This parameter is optional -) +labs.delete_user_from_workspace(email_address=, workspace=None) ``` ### Parameters -> **email_address** (str) +> **email_address** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The email address of the user. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the workspace. Defaults to None which resolves to the workspace of the attached lakehouse @@ -582,37 +494,31 @@ or if no lakehouse attached, resolves to the workspace of the notebook. 
```python import sempy_labs as labs -labs.deploy_semantic_model( - source_dataset='', - source_workspace=None, # This parameter is optional - target_dataset=None, # This parameter is optional - target_workspace=None, # This parameter is optional - refresh_target_dataset=True, # This parameter is optional -) +labs.deploy_semantic_model(source_dataset=, source_workspace=None, target_dataset=None, target_workspace=None, refresh_target_dataset=True) ``` ### Parameters -> **source_dataset** (str) +> **source_dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model to deploy. > -> **source_workspace** (str, default=None) +> **source_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **target_dataset** (str) +> **target_dataset** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Name of the new semantic model to be created. > -> **target_workspace** (str, default=None) +> **target_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the new semantic model will be deployed. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **refresh_target_dataset** (bool, default=True) +> **refresh_target_dataset** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = True) > >> Optional; If set to True, this will initiate a full refresh of the target semantic model in the target workspace. > @@ -621,13 +527,11 @@ or if no lakehouse attached, resolves to the workspace of the notebook. 
```python import sempy_labs as labs -labs.deprovision_workspace_identity( - workspace=None, # This parameter is optional -) +labs.deprovision_workspace_identity(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -638,37 +542,32 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.disable_qso( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +labs.disable_qso(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the current query scale out settings. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the current query scale out settings. ### [disconnect_workspace_from_git](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.disconnect_workspace_from_git) #### Disconnects a workpsace from a git repository. 
```python import sempy_labs as labs -labs.disconnect_workspace_from_git( - workspace=None, # This parameter is optional -) +labs.disconnect_workspace_from_git(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -681,64 +580,54 @@ Compared to evaluate_dax this allows passing the user name for impersonation. Note that the REST API has significant limitations compared to the XMLA endpoint. ```python import sempy_labs as labs -labs.evaluate_dax_impersonation( - dataset='AdvWorks', - dax_query='EVALUATE SUMMARIZECOLUMNS("MyMeasure", 1)', - user_name=None, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.evaluate_dax_impersonation(dataset=, dax_query=, user_name=hello@goodbye.com, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **dax_query** (str) +> **dax_query** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The DAX query. > -> **user_name** (str) +> **user_name** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The user name (i.e. hello@goodbye.com). > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe holding the result of the DAX query. 
+> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe holding the result of the DAX query. ### [export_model_to_onelake](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.export_model_to_onelake) #### Exports a semantic model's tables to delta tables in the lakehouse. Creates shortcuts to the tables if a lakehouse is specified. ```python import sempy_labs as labs -labs.export_model_to_onelake( - dataset='AdvWorks', - workspace=None, # This parameter is optional - destination_lakehouse=None, # This parameter is optional - destination_workspace=None, # This parameter is optional -) +labs.export_model_to_onelake(dataset=, workspace=None, destination_lakehouse=None, destination_workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **destination_lakehouse** (str, default=None) +> **destination_lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric lakehouse where shortcuts will be created to access the delta tables created by the export. If the lakehouse specified does not exist, one will be created with that name. If no lakehouse is specified, shortcuts will not be created. > -> **destination_workspace** (str, default=None) +> **destination_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace in which the lakehouse resides. 
> @@ -747,52 +636,45 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.format_dax_object_name( - table='', - column=tom.model.Tables["Geography"].Columns["GeographyKey"], -) +labs.format_dax_object_name(table=, column=) ``` ### Parameters -> **table** (str) +> **table** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the table. > -> **column** (str) +> **column** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the column. > ### Returns -> str; The fully qualified object name. +> [str](https://docs.python.org/3/library/stdtypes.html#str); The fully qualified object name. ### [generate_embedded_filter](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.generate_embedded_filter) #### Converts the filter expression to a filter expression which can be used by a Power BI embedded URL. ```python import sempy_labs as labs -labs.generate_embedded_filter( - filter='', -) +labs.generate_embedded_filter(filter=) ``` ### Parameters -> **filter** (str) +> **filter** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The filter expression for an embedded Power BI report. > ### Returns -> str; A filter expression usable by a Power BI embedded URL. +> [str](https://docs.python.org/3/library/stdtypes.html#str); A filter expression usable by a Power BI embedded URL. ### [get_capacity_id](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_capacity_id) #### Obtains the Capacity Id for a given workspace. ```python import sempy_labs as labs -labs.get_capacity_id( - workspace=None, # This parameter is optional -) +labs.get_capacity_id(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. 
Defaults to None which resolves to the workspace of the attached lakehouse @@ -805,37 +687,32 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.get_capacity_name( - workspace=None, # This parameter is optional -) +labs.get_capacity_name(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> str; The capacity name. +> [str](https://docs.python.org/3/library/stdtypes.html#str); The capacity name. ### [get_direct_lake_sql_endpoint](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_direct_lake_sql_endpoint) #### Obtains the SQL Endpoint ID of the semantic model. ```python import sempy_labs as labs -labs.get_direct_lake_sql_endpoint( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +labs.get_direct_lake_sql_endpoint(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -848,111 +725,97 @@ or if no lakehouse attached, resolves to the workspace of the notebook. 
```python import sempy_labs as labs -labs.get_git_connection( - workspace=None, # This parameter is optional -) +labs.get_git_connection(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the Git status of items in the workspace. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the Git status of items in the workspace. ### [get_git_status](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_git_status) #### Obtains the Git status of items in the workspace, that can be committed to Git. ```python import sempy_labs as labs -labs.get_git_status( - workspace=None, # This parameter is optional -) +labs.get_git_status(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the Git status of items in the workspace. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the Git status of items in the workspace. ### [get_measure_dependencies](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_measure_dependencies) #### Shows all dependencies for all measures in a semantic model. 
```python import sempy_labs as labs -labs.get_measure_dependencies( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +labs.get_measure_dependencies(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; Shows all dependencies for all measures in the semantic model. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); Shows all dependencies for all measures in the semantic model. ### [get_model_calc_dependencies](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_model_calc_dependencies) #### Shows all dependencies for all objects in a semantic model. ```python import sempy_labs as labs -labs.get_model_calc_dependencies( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +labs.get_model_calc_dependencies(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; Shows all dependencies for all objects in the semantic model. 
+> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); Shows all dependencies for all objects in the semantic model. ### [get_notebook_definition](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_notebook_definition) #### Obtains the notebook definition. ```python import sempy_labs as labs -labs.get_notebook_definition( - notebook_name='', - workspace=None, # This parameter is optional - decode=True, # This parameter is optional -) +labs.get_notebook_definition(notebook_name=, workspace=None, decode=True) ``` ### Parameters -> **notebook_name** (str) +> **notebook_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the notebook. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the workspace. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **decode** (bool, default=True) +> **decode** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = True) > >> Optional; If True, decodes the notebook definition file into .ipynb format. If False, obtains the notebook definition file in base64 format. @@ -964,108 +827,93 @@ If False, obtains the notebook definition file in base64 format. ```python import sempy_labs as labs -labs.get_object_level_security( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +labs.get_object_level_security(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. 
Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the object level security for the semantic model. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the object level security for the semantic model. ### [get_semantic_model_bim](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_semantic_model_bim) #### Extracts the Model.bim file for a given semantic model. ```python import sempy_labs as labs -labs.get_semantic_model_bim( - dataset='AdvWorks', - workspace=None, # This parameter is optional - save_to_file_name=None, # This parameter is optional - lakehouse_workspace=None, # This parameter is optional -) +labs.get_semantic_model_bim(dataset=, workspace=None, save_to_file_name=None, lakehouse_workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the semantic model resides. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **save_to_file_name** (str, default=None) +> **save_to_file_name** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; If specified, saves the Model.bim as a file in the lakehouse attached to the notebook. 
> -> **lakehouse_workspace** (str, default=None) +> **lakehouse_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the lakehouse attached to the workspace resides. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> dict; The Model.bim file for the semantic model. +> [dict](https://docs.python.org/3/library/typing.html#typing.Dict); The Model.bim file for the semantic model. ### [get_spark_settings](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_spark_settings) #### Shows the spark settings for a workspace. ```python import sempy_labs as labs -labs.get_spark_settings( - workspace=None, # This parameter is optional -) +labs.get_spark_settings(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the spark settings for a workspace. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the spark settings for a workspace. ### [import_notebook_from_web](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_notebook_from_web) #### Creates a new notebook within a workspace based on a Jupyter notebook hosted in the web. 
```python import sempy_labs as labs -labs.import_notebook_from_web( - notebook_name='', - url='', - description=None, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.import_notebook_from_web(notebook_name=, url=, description=None, workspace=None) ``` ### Parameters -> **notebook_name** (str) +> **notebook_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the notebook to be created. > -> **url** (str) +> **url** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The url of the Jupyter Notebook (.ipynb) > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The description of the notebook. Defaults to None which does not place a description. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the workspace. Defaults to None which resolves to the workspace of the attached lakehouse @@ -1076,35 +924,30 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.import_vertipaq_analyzer( - folder_path='', - file_name='', -) +labs.import_vertipaq_analyzer(folder_path=, file_name=) ``` ### Parameters -> **folder_path** (str) +> **folder_path** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The folder within your lakehouse in which the .zip file containing the vertipaq analyzer info has been saved. > -> **file_name** (str) +> **file_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The file name of the file which contains the vertipaq analyzer info. > ### Returns -> str; A visualization of the Vertipaq Analyzer statistics. +> [str](https://docs.python.org/3/library/stdtypes.html#str); A visualization of the Vertipaq Analyzer statistics. 
### [initialize_git_connection](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.initialize_git_connection) #### Initializes a connection for a workspace that is connected to Git. ```python import sempy_labs as labs -labs.initialize_git_connection( - workspace=None, # This parameter is optional -) +labs.initialize_git_connection(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -1115,25 +958,22 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.is_default_semantic_model( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +labs.is_default_semantic_model(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> bool; A True/False value indicating whether the semantic model is a default semantic model. +> [bool](https://docs.python.org/3/library/stdtypes.html#bool); A True/False value indicating whether the semantic model is a default semantic model. ### [list_capacities](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_capacities) #### Shows the capacities and their properties. 
@@ -1143,45 +983,41 @@ labs.list_capacities() ``` ### Returns -> pandas.DataFrame; A pandas dataframe showing the capacities and their properties +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the capacities and their properties ### [list_custom_pools](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_custom_pools) #### Lists all [custom pools](https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools) within a workspace. ```python import sempy_labs as labs -labs.list_custom_pools( - workspace=None, # This parameter is optional -) +labs.list_custom_pools(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing all the custom pools within the Fabric workspace. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing all the custom pools within the Fabric workspace. ### [list_dashboards](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_dashboards) #### Shows a list of the dashboards within a workspace. ```python import sempy_labs as labs -labs.list_dashboards( - workspace=None, # This parameter is optional -) +labs.list_dashboards(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. 
Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the dashboards within a workspace. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the dashboards within a workspace. ### [list_dataflow_storage_accounts](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_dataflow_storage_accounts) #### Shows the accessible dataflow storage accounts. @@ -1191,65 +1027,58 @@ labs.list_dataflow_storage_accounts() ``` ### Returns -> pandas.DataFrame; A pandas dataframe showing the accessible dataflow storage accounts. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the accessible dataflow storage accounts. ### [list_dataflows](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_dataflows) #### Shows a list of all dataflows which exist within a workspace. ```python import sempy_labs as labs -labs.list_dataflows( - workspace=None, # This parameter is optional -) +labs.list_dataflows(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the dataflows which exist within a workspace. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the dataflows which exist within a workspace. 
### [list_deployment_pipeline_stage_items](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_deployment_pipeline_stage_items) #### Shows the supported items from the workspace assigned to the specified stage of the specified deployment pipeline. ```python import sempy_labs as labs -labs.list_deployment_pipeline_stage_items( - deployment_pipeline='', - stage_name='', -) +labs.list_deployment_pipeline_stage_items(deployment_pipeline=, stage_name=) ``` ### Parameters -> **deployment_pipeline** (str) +> **deployment_pipeline** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The deployment pipeline name. > -> **stage_name** (str) +> **stage_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The deployment pipeline stage name. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the supported items from the workspace assigned to the specified stage of the specified deployment pipeline. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the supported items from the workspace assigned to the specified stage of the specified deployment pipeline. ### [list_deployment_pipeline_stages](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_deployment_pipeline_stages) #### Shows the specified deployment pipeline stages. ```python import sempy_labs as labs -labs.list_deployment_pipeline_stages( - deployment_pipeline='', -) +labs.list_deployment_pipeline_stages(deployment_pipeline=) ``` ### Parameters -> **deployment_pipeline** (str) +> **deployment_pipeline** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The deployment pipeline name. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the specified deployment pipeline stages. 
+> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the specified deployment pipeline stages. ### [list_deployment_pipelines](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_deployment_pipelines) #### Shows a list of deployment pipelines the user can access. @@ -1259,202 +1088,178 @@ labs.list_deployment_pipelines() ``` ### Returns -> pandas.DataFrame; A pandas dataframe showing a list of deployment pipelines the user can access. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing a list of deployment pipelines the user can access. ### [list_lakehouses](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_lakehouses) #### Shows the lakehouses within a workspace. ```python import sempy_labs as labs -labs.list_lakehouses( - workspace=None, # This parameter is optional -) +labs.list_lakehouses(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the lakehouses within a workspace. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the lakehouses within a workspace. ### [list_qso_settings](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_qso_settings) #### Shows the query scale out settings for a semantic model (or all semantic models within a workspace). 
```python import sempy_labs as labs -labs.list_qso_settings( - dataset=None, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.list_qso_settings(dataset=None, workspace=None) ``` ### Parameters -> **dataset** (str, default=None) +> **dataset** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the query scale out settings. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the query scale out settings. ### [list_reports_using_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_reports_using_semantic_model) #### Shows a list of all the reports (in all workspaces) which use a given semantic model. ```python import sempy_labs as labs -labs.list_reports_using_semantic_model( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +labs.list_reports_using_semantic_model(dataset='AdvWorks', workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. 
> ### Returns -> pandas.DataFrame; A pandas dataframe showing the reports which use a given semantic model. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the reports which use a given semantic model. ### [list_semantic_model_objects](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_semantic_model_objects) #### Shows a list of semantic model objects. ```python import sempy_labs as labs -labs.list_semantic_model_objects( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +labs.list_semantic_model_objects(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing a list of objects in the semantic model +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing a list of objects in the semantic model ### [list_shortcuts](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_shortcuts) #### Shows all shortcuts which exist in a Fabric lakehouse and their properties. 
```python import sempy_labs as labs -labs.list_shortcuts( - lakehouse=None, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.list_shortcuts(lakehouse=None, workspace=None) ``` ### Parameters -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric lakehouse name. Defaults to None which resolves to the lakehouse attached to the notebook. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace in which lakehouse resides. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing all the shortcuts which exist in the specified lakehouse. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing all the shortcuts which exist in the specified lakehouse. ### [list_warehouses](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_warehouses) #### Shows the warehouses within a workspace. ```python import sempy_labs as labs -labs.list_warehouses( - workspace=None, # This parameter is optional -) +labs.list_warehouses(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the warehouses within a workspace. 
+> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the warehouses within a workspace. ### [list_workspace_role_assignments](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_workspace_role_assignments) #### Shows the members of a given workspace. ```python import sempy_labs as labs -labs.list_workspace_role_assignments( - workspace=None, # This parameter is optional -) +labs.list_workspace_role_assignments(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the members of a given workspace and their roles. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the members of a given workspace and their roles. ### [list_workspace_users](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_workspace_users) #### A list of all the users of a workspace and their roles. ```python import sempy_labs as labs -labs.list_workspace_users( - workspace=None, # This parameter is optional -) +labs.list_workspace_users(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the workspace. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe the users of a workspace and their properties. 
+> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe the users of a workspace and their properties. ### [measure_dependency_tree](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.measure_dependency_tree) #### Prints a measure dependency tree of all dependent objects for a measure in a semantic model. ```python import sempy_labs as labs -labs.measure_dependency_tree( - dataset='AdvWorks', - measure_name='', - workspace=None, # This parameter is optional -) +labs.measure_dependency_tree(dataset='AdvWorks', measure_name='', workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **measure_name** (str) +> **measure_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the measure. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -1465,42 +1270,36 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.model_bpa_rules( - dataset='AdvWorks', - workspace=None, # This parameter is optional - dependencies=None, # This parameter is optional -) +labs.model_bpa_rules(dataset='AdvWorks', workspace=None, dependencies=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. 
Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **dependencies** (pd.DataFrame, default=None) +> **dependencies** (Optional[pandas.core.frame.DataFrame] = None) > >> Optional; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. > ### Returns -> pandas.DataFrame; A pandas dataframe containing the default rules for the run_model_bpa function. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe containing the default rules for the run_model_bpa function. ### [provision_workspace_identity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.provision_workspace_identity) #### Provisions a workspace identity for a workspace. ```python import sempy_labs as labs -labs.provision_workspace_identity( - workspace=None, # This parameter is optional -) +labs.provision_workspace_identity(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -1511,18 +1310,15 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.qso_sync( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +labs.qso_sync(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. 
Defaults to None which resolves to the workspace of the attached lakehouse @@ -1533,74 +1329,62 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.qso_sync_status( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +labs.qso_sync_status(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> Tuple[pandas.DataFrame, pandas.DataFrame]; 2 pandas dataframes showing the query scale-out sync status. +> Tuple[[pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame), [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame)]; 2 pandas dataframes showing the query scale-out sync status. ### [refresh_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.refresh_semantic_model) #### Refreshes a semantic model. 
```python import sempy_labs as labs -labs.refresh_semantic_model( - dataset='AdvWorks', - tables=None, # This parameter is optional - partitions=None, # This parameter is optional - refresh_type=None, # This parameter is optional - retry_count=0, # This parameter is optional - apply_refresh_policy=True, # This parameter is optional - max_parallelism='10', # This parameter is optional - workspace=None, # This parameter is optional -) +labs.refresh_semantic_model(dataset=, tables=None, partitions=None, refresh_type=None, retry_count=0, apply_refresh_policy=True, max_parallelism=10, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **tables** (str, List[str], default=None) +> **tables** (Union[[str](https://docs.python.org/3/library/stdtypes.html#str), List[[str](https://docs.python.org/3/library/stdtypes.html#str)], NoneType] = None) > >> Optional; A string or a list of tables to refresh. > -> **partitions** (str, List[str], default=None) +> **partitions** (Union[[str](https://docs.python.org/3/library/stdtypes.html#str), List[[str](https://docs.python.org/3/library/stdtypes.html#str)], NoneType] = None) > >> Optional; A string or a list of partitions to refresh. Partitions must be formatted as such: 'Table Name'[Partition Name]. > -> **refresh_type** (str, default='full') +> **refresh_type** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The type of processing to perform. Types align with the TMSL refresh command types: full, clearValues, calculate, dataOnly, automatic, and defragment. The add type isn't supported. Defaults to "full". > -> **retry_count** (int, default=0) +> **retry_count** (Optional[int] = 0) > >> Optional; Number of times the operation retries before failing. 
> -> **apply_refresh_policy** (bool, default=True) +> **apply_refresh_policy** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = True) > >> Optional; If an incremental refresh policy is defined, determines whether to apply the policy. Modes are true or false. If the policy isn't applied, the full process leaves partition definitions unchanged, and fully refreshes all partitions in the table. If commitMode is transactional, applyRefreshPolicy can be true or false. If commitMode is partialBatch, applyRefreshPolicy of true isn't supported, and applyRefreshPolicy must be set to false. > -> **max_parallelism** (int, default=10) +> **max_parallelism** (Optional[int] = 10) > >> Optional; Determines the maximum number of threads that can run the processing commands in parallel. This value aligns with the MaxParallelism property that can be set in the TMSL Sequence command or by using other methods. Defaults to 10. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -1611,37 +1395,32 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.resolve_capacity_name( - capacity_id=None, # This parameter is optional -) +labs.resolve_capacity_name(capacity_id=None) ``` ### Parameters -> **capacity_id** (UUID, default=None) +> **capacity_id** (Optional[uuid.UUID] = None) > >> Optional; The capacity Id. Defaults to None which resolves to the capacity name of the workspace of the attached lakehouse or if no lakehouse attached, resolves to the capacity name of the workspace of the notebook. > ### Returns -> str; The capacity name. +> [str](https://docs.python.org/3/library/stdtypes.html#str); The capacity name. 
### [resolve_dataset_id](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_dataset_id) #### Obtains the ID of the semantic model. ```python import sempy_labs as labs -labs.resolve_dataset_id( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +labs.resolve_dataset_id(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -1654,66 +1433,57 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.resolve_dataset_name( - dataset_id='', - workspace=None, # This parameter is optional -) +labs.resolve_dataset_name(dataset_id=, workspace=None) ``` ### Parameters -> **dataset_id** (UUID) +> **dataset_id** (uuid.UUID) > >> Required; The name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> str; The name of the semantic model. +> [str](https://docs.python.org/3/library/stdtypes.html#str); The name of the semantic model. ### [resolve_item_type](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_item_type) #### Obtains the item type for a given Fabric Item Id within a Fabric workspace. 
```python import sempy_labs as labs -labs.resolve_item_type( - item_id='', - workspace=None, # This parameter is optional -) +labs.resolve_item_type(item_id=, workspace=None) ``` ### Parameters -> **item_id** (UUID) +> **item_id** (uuid.UUID) > >> Required; The item/artifact Id. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> str; The item type for the item Id. +> [str](https://docs.python.org/3/library/stdtypes.html#str); The item type for the item Id. ### [resolve_lakehouse_id](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_lakehouse_id) #### Obtains the ID of the Fabric lakehouse. ```python import sempy_labs as labs -labs.resolve_lakehouse_id( - lakehouse='', - workspace=None, # This parameter is optional -) +labs.resolve_lakehouse_id(lakehouse=, workspace=None) ``` ### Parameters -> **lakehouse** (str) +> **lakehouse** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the Fabric lakehouse. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -1726,43 +1496,37 @@ or if no lakehouse attached, resolves to the workspace of the notebook. 
```python import sempy_labs as labs -labs.resolve_lakehouse_name( - lakehouse_id=None, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.resolve_lakehouse_name(lakehouse_id=None, workspace=None) ``` ### Parameters -> **lakehouse_id** (UUID, default=None) +> **lakehouse_id** (Optional[uuid.UUID] = None) > >> Optional; The name of the Fabric lakehouse. Defaults to None which resolves to the lakehouse attached to the notebook. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> str; The name of the Fabric lakehouse. +> [str](https://docs.python.org/3/library/stdtypes.html#str); The name of the Fabric lakehouse. ### [resolve_report_id](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_report_id) #### Obtains the ID of the Power BI report. ```python import sempy_labs as labs -labs.resolve_report_id( - report='', - workspace=None, # This parameter is optional -) +labs.resolve_report_id(report=, workspace=None) ``` ### Parameters -> **report** (str) +> **report** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the Power BI report. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -1775,83 +1539,71 @@ or if no lakehouse attached, resolves to the workspace of the notebook. 
```python import sempy_labs as labs -labs.resolve_report_name( - report_id='', - workspace=None, # This parameter is optional -) +labs.resolve_report_name(report_id=, workspace=None) ``` ### Parameters -> **report_id** (UUID) +> **report_id** (uuid.UUID) > >> Required; The name of the Power BI report. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> str; The name of the Power BI report. +> [str](https://docs.python.org/3/library/stdtypes.html#str); The name of the Power BI report. ### [resolve_workspace_capacity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resolve_workspace_capacity) #### Obtains the capacity Id and capacity name for a given workspace. ```python import sempy_labs as labs -labs.resolve_workspace_capacity( - workspace=None, # This parameter is optional -) +labs.resolve_workspace_capacity(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> Tuple[UUID, str]; capacity Id; capacity came. +> Tuple[UUID, [str](https://docs.python.org/3/library/stdtypes.html#str)]; capacity Id; capacity name. 
### [restore_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.restore_semantic_model) #### [Restores](https://learn.microsoft.com/power-bi/enterprise/service-premium-backup-restore-dataset) a semantic model based on a backup (.abf) file within the ADLS Gen2 storage account connected to the workspace. ```python import sempy_labs as labs -labs.restore_semantic_model( - dataset='AdvWorks', - file_path='', - allow_overwrite=True, # This parameter is optional - ignore_incompatibilities=True, # This parameter is optional - force_restore=False, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.restore_semantic_model(dataset=, file_path=, allow_overwrite=True, ignore_incompatibilities=True, force_restore=False, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **file_path** (str) +> **file_path** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The location in which to backup the semantic model. Must end in '.abf'. Example 1: file_path = 'MyModel.abf' Example 2: file_path = 'MyFolder/MyModel.abf' > -> **allow_overwrite** (bool, default=True) +> **allow_overwrite** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = True) > >> Optional; If True, overwrites backup files of the same name. If False, the file you are saving cannot have the same name as a file that already exists in the same location. > -> **ignore_incompatibilities** (bool, default=True) +> **ignore_incompatibilities** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = True) > >> Optional; If True, ignores incompatibilities between Azure Analysis Services and Power BI Premium. 
> -> **force_restore** (bool, default=False) +> **force_restore** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; If True, restores the semantic model with the existing semantic model unloaded and offline. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -1862,90 +1614,76 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.run_model_bpa( - dataset='AdvWorks', - rules=None, # This parameter is optional - workspace=None, # This parameter is optional - export=False, # This parameter is optional - return_dataframe=False, # This parameter is optional - extended=False, # This parameter is optional - language=None, # This parameter is optional -) +labs.run_model_bpa(dataset=, rules=None, workspace=None, export=False, return_dataframe=False, extended=False, language=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **rules** (pandas.DataFrame, default=None) +> **rules** (Optional[pandas.core.frame.DataFrame] = None) > >> Optional; A pandas dataframe containing rules to be evaluated. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **export** (bool, default=False) +> **export** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; If True, exports the resulting dataframe to a delta table in the lakehouse attached to the notebook. 
> -> **return_dataframe** (bool, default=False) +> **return_dataframe** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; If True, returns a pandas dataframe instead of the visualization. > -> **extended** (bool, default=False) +> **extended** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; If True, runs the set_vertipaq_annotations function to collect Vertipaq Analyzer statistics to be used in the analysis of the semantic model. > -> **language** (str, default=None) +> **language** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Specifying a language code (i.e. 'it-IT' for Italian) will auto-translate the Category, Rule Name and Description into the specified language. Defaults to None which resolves to English. > +> **kwargs** (**kwargs) +> +>> Required; None +> ### Returns -> pandas.DataFrame; A pandas dataframe in HTML format showing semantic model objects which violated the best practice analyzer rules. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe in HTML format showing semantic model objects which violated the best practice analyzer rules. ### [run_model_bpa_bulk](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa_bulk) #### Runs the semantic model Best Practice Analyzer across all semantic models in a workspace (or all accessible workspaces). Saves (appends) the results to the 'modelbparesults' delta table in the lakehouse attached to the notebook. Default semantic models are skipped in this analysis. 
```python import sempy_labs as labs -labs.run_model_bpa_bulk( - rules=None, # This parameter is optional - extended=False, # This parameter is optional - language=None, # This parameter is optional - workspace=None, # This parameter is optional - skip_models=['ModelBPA', 'Fabric Capacity Metrics'], # This parameter is optional -) +labs.run_model_bpa_bulk(rules=None, extended=False, language=None, workspace=None, skip_models=['ModelBPA', 'Fabric Capacity Metrics']) ``` ### Parameters -> **dataset** (str) -> ->> Optional; Name of the semantic model. -> -> **rules** (pandas.DataFrame, default=None) +> **rules** (Optional[pandas.core.frame.DataFrame] = None) > >> Optional; A pandas dataframe containing rules to be evaluated. Based on the format of the dataframe produced by the model_bpa_rules function. > -> **extended** (bool, default=False) +> **extended** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; If True, runs the set_vertipaq_annotations function to collect Vertipaq Analyzer statistics to be used in the analysis of the semantic model. > -> **language** (str, default=None) +> **language** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The language (code) in which the rules will appear. For example, specifying 'it-IT' will show the Rule Name, Category and Description in Italian. Defaults to None which resolves to English. > -> **workspace** (str | List[str], default=None) +> **workspace** (Union[[str](https://docs.python.org/3/library/stdtypes.html#str), List[[str](https://docs.python.org/3/library/stdtypes.html#str)], NoneType] = None) > >> Optional; The workspace or list of workspaces to scan. Defaults to None which scans all accessible workspaces. 
> -> **skip_models** (str | List[str], default=['ModelBPA', 'Fabric Capacity Metrics']) +> **skip_models** (Union[[str](https://docs.python.org/3/library/stdtypes.html#str), List[[str](https://docs.python.org/3/library/stdtypes.html#str)], NoneType] = ['ModelBPA', 'Fabric Capacity Metrics']) > >> Optional; The semantic models to always skip when running this analysis. > @@ -1954,39 +1692,32 @@ Defaults to None which scans all accessible workspaces. ```python import sempy_labs as labs -labs.save_as_delta_table( - dataframe='', - delta_table_name='', - write_mode='', - merge_schema=False, # This parameter is optional - lakehouse=None, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.save_as_delta_table(dataframe=, delta_table_name=, write_mode=, merge_schema=False, lakehouse=None, workspace=None) ``` ### Parameters -> **dataframe** (pandas.DataFrame) +> **dataframe** (dataframe) > >> Required; The dataframe to be saved as a delta table. > -> **delta_table_name** (str) +> **delta_table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the delta table. > -> **write_mode** (str) +> **write_mode** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The write mode for the save operation. Options: 'append', 'overwrite'. > -> **merge_schema** (bool, default=False) +> **merge_schema** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Merges the schemas of the dataframe to the delta table. > -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric lakehouse used by the Direct Lake semantic model. Defaults to None which resolves to the lakehouse attached to the notebook. 
> -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -1999,57 +1730,48 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.set_qso( - dataset='AdvWorks', - auto_sync=True, # This parameter is optional - max_read_only_replicas='-1', # This parameter is optional - workspace=None, # This parameter is optional -) +labs.set_qso(dataset=, auto_sync=True, max_read_only_replicas=-1, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **auto_sync** (bool, default=True) +> **auto_sync** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = True) > >> Optional; Whether the semantic model automatically syncs read-only replicas. > -> **max_read_only_replicas** (int, default=-1) +> **max_read_only_replicas** (Optional[int] = -1) > >> Optional; To enable semantic model scale-out, set max_read_only_replicas to -1, or any non-0 value. A value of -1 allows Power BI to create as many read-only replicas as your Power BI capacity supports. You can also explicitly set the replica count to a value lower than that of the capacity maximum. Setting max_read_only_replicas to -1 is recommended. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the current query scale-out settings. 
+> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the current query scale-out settings. ### [set_semantic_model_storage_format](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.set_semantic_model_storage_format) #### Sets the semantic model storage format. ```python import sempy_labs as labs -labs.set_semantic_model_storage_format( - dataset='AdvWorks', - storage_format='', - workspace=None, # This parameter is optional -) +labs.set_semantic_model_storage_format(dataset=, storage_format=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **storage_format** (str) +> **storage_format** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The storage format for the semantic model. Valid options: 'Large', 'Small'. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -2060,18 +1782,15 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.set_workspace_default_storage_format( - storage_format='', - workspace=None, # This parameter is optional -) +labs.set_workspace_default_storage_format(storage_format=, workspace=None) ``` ### Parameters -> **storage_format** (str) +> **storage_format** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The storage format for the semantic model. Valid options: 'Large', 'Small'. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. 
Defaults to None which resolves to the workspace of the attached lakehouse @@ -2082,47 +1801,40 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.translate_semantic_model( - dataset='AdvWorks', - languages=['it-IT', 'zh-CN'], - exclude_characters=None, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.translate_semantic_model(dataset=, languages=, exclude_characters=None, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **languages** (str, List[str]) +> **languages** (Union[[str](https://docs.python.org/3/library/stdtypes.html#str), List[[str](https://docs.python.org/3/library/stdtypes.html#str)]]) > >> Required; The language code(s) in which to translate the semantic model. > -> **exclude_characters** (str) +> **exclude_characters** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A string specifying characters which will be replaced by a space in the translation text when sent to the translation service. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; Shows a pandas dataframe which displays all of the translations in the semantic model. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); Shows a pandas dataframe which displays all of the translations in the semantic model. 
### [unassign_workspace_from_capacity](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.unassign_workspace_from_capacity) #### Unassigns a workspace from its assigned capacity. ```python import sempy_labs as labs -labs.unassign_workspace_from_capacity( - workspace=None, # This parameter is optional -) +labs.unassign_workspace_from_capacity(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace. Defaults to None which resolves to the workspace of the attached lakehouse @@ -2133,66 +1845,55 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.update_custom_pool( - pool_name='', - node_size=None, # This parameter is optional - min_node_count=None, # This parameter is optional - max_node_count=None, # This parameter is optional - min_executors=None, # This parameter is optional - max_executors=None, # This parameter is optional - node_family=None, # This parameter is optional - auto_scale_enabled=None, # This parameter is optional - dynamic_executor_allocation_enabled=None, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.update_custom_pool(pool_name=, node_size=None, min_node_count=None, max_node_count=None, min_executors=None, max_executors=None, node_family=None, auto_scale_enabled=None, dynamic_executor_allocation_enabled=None, workspace=None) ``` ### Parameters -> **pool_name** (str) +> **pool_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The custom pool name. > -> **node_size** (str, default=None) +> **node_size** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The [node size](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#nodesize). 
Defaults to None which keeps the existing property setting. > -> **min_node_count** (int, default=None) +> **min_node_count** (Optional[int] = None) > >> Optional; The [minimum node count](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties). Defaults to None which keeps the existing property setting. > -> **max_node_count** (int, default=None) +> **max_node_count** (Optional[int] = None) > >> Optional; The [maximum node count](https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties). Defaults to None which keeps the existing property setting. > -> **min_executors** (int, default=None) +> **min_executors** (Optional[int] = None) > >> Optional; The [minimum executors](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties). Defaults to None which keeps the existing property setting. > -> **max_executors** (int, default=None) +> **max_executors** (Optional[int] = None) > >> Optional; The [maximum executors](https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties). Defaults to None which keeps the existing property setting. > -> **node_family** (str, default=None) +> **node_family** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The [node family](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#nodefamily). Defaults to None which keeps the existing property setting. 
> -> **auto_scale_enabled** (bool, default=None) +> **auto_scale_enabled** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = None) > >> Optional; The status of [auto scale](https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties). Defaults to None which keeps the existing property setting. > -> **dynamic_executor_allocation_enabled** (bool, default=None) +> **dynamic_executor_allocation_enabled** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = None) > >> Optional; The status of the [dynamic executor allocation](https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties). Defaults to None which keeps the existing property setting. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace. Defaults to None which resolves to the workspace of the attached lakehouse @@ -2203,34 +1904,28 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.update_from_git( - remote_commit_hash='', - conflict_resolution_policy='', - workspace_head=None, # This parameter is optional - allow_override=False, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.update_from_git(remote_commit_hash=, conflict_resolution_policy=, workspace_head=None, allow_override=False, workspace=None) ``` ### Parameters -> **workspace_head** (str) +> **remote_commit_hash** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > ->> Optional; Full SHA hash that the workspace is synced to. This value may be null only after Initialize Connection. -In other cases, the system will validate that the given value is aligned with the head known to the system. 
+>> Required; None > -> **remove_commit_hash** (str) +> **conflict_resolution_policy** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > ->> Optional; Remote full SHA commit hash. +>> Required; None > -> **confilict_resolution_policy** (str) +> **workspace_head** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > ->> Optional; The [conflict resolution policy](https://learn.microsoft.com/rest/api/fabric/core/git/update-from-git?tabs=HTTP#conflictresolutionpolicy). +>> Optional; Full SHA hash that the workspace is synced to. This value may be null only after Initialize Connection. +In other cases, the system will validate that the given value is aligned with the head known to the system. > -> **allow_override** (bool, default=False) +> **allow_override** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; None > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -2241,33 +1936,27 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.update_item( - item_type='', - current_name='', - new_name='', - description=None, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.update_item(item_type=, current_name=, new_name=, description=None, workspace=None) ``` ### Parameters -> **item_type** (str) +> **item_type** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Type of item to update. > -> **current_name** (str) +> **current_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The current name of the item. 
> -> **new_name** (str) +> **new_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The new name of the item. > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description of the item. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -2278,61 +1967,51 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -labs.update_spark_settings( - automatic_log_enabled=None, # This parameter is optional - high_concurrency_enabled=None, # This parameter is optional - customize_compute_enabled=None, # This parameter is optional - default_pool_name=None, # This parameter is optional - max_node_count=None, # This parameter is optional - max_executors=None, # This parameter is optional - environment_name=None, # This parameter is optional - runtime_version=None, # This parameter is optional - workspace=None, # This parameter is optional -) +labs.update_spark_settings(automatic_log_enabled=None, high_concurrency_enabled=None, customize_compute_enabled=None, default_pool_name=None, max_node_count=None, max_executors=None, environment_name=None, runtime_version=None, workspace=None) ``` ### Parameters -> **automatic_log_enabled** (bool, default=None) +> **automatic_log_enabled** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = None) > >> Optional; The status of the [automatic log](https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#automaticlogproperties). Defaults to None which keeps the existing property setting. 
> -> **high_concurrency_enabled** (bool, default=None) +> **high_concurrency_enabled** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = None) > >> Optional; The status of the [high concurrency](https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#highconcurrencyproperties) for notebook interactive run. Defaults to None which keeps the existing property setting. > -> **customize_compute_enabled** (bool, default=None) +> **customize_compute_enabled** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = None) > >> Optional; [Customize compute](https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#poolproperties) configurations for items. Defaults to None which keeps the existing property setting. > -> **default_pool_name** (str, default=None) +> **default_pool_name** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; [Default pool](https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#poolproperties) for workspace. Defaults to None which keeps the existing property setting. > -> **max_node_count** (int, default=None) +> **max_node_count** (Optional[int] = None) > >> Optional; The [maximum node count](https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#starterpoolproperties). Defaults to None which keeps the existing property setting. > -> **max_executors** (int, default=None) +> **max_executors** (Optional[int] = None) > >> Optional; The [maximum executors](https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#starterpoolproperties). Defaults to None which keeps the existing property setting. 
 >
-> **environment_name** (str, default=None)
+> **environment_name** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None)
 >
 >> Optional; The name of the [default environment](https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#environmentproperties). Empty string indicated there is no workspace default environment
 Defaults to None which keeps the existing property setting.
 >
-> **runtime_version** (str, default=None)
+> **runtime_version** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None)
 >
 >> Optional; The [runtime version](https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#environmentproperties).
 Defaults to None which keeps the existing property setting.
 >
-> **workspace** (str, default=None)
+> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None)
 >
 >> Optional; The name of the Fabric workspace.
 Defaults to None which resolves to the workspace of the attached lakehouse
@@ -2343,28 +2022,23 @@ or if no lakehouse attached, resolves to the workspace of the notebook.
 ```python
 import sempy_labs as labs
-labs.update_workspace_user(
-    email_address='hello@goodbye.com',
-    role_name='',
-    principal_type='User', # This parameter is optional
-    workspace=None, # This parameter is optional
-)
+labs.update_workspace_user(email_address='hello@goodbye.com', role_name='', principal_type='User', workspace=None)
 ```
 ### Parameters
-> **email_address** (str)
+> **email_address** ([str](https://docs.python.org/3/library/stdtypes.html#str))
 >
 >> Required; The email address of the user.
 >
-> **role_name** (str)
+> **role_name** ([str](https://docs.python.org/3/library/stdtypes.html#str))
 >
 >> Required; The [role](https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#groupuseraccessright) of the user within the workspace.
 >
-> **principal_type** (str, default='User')
+> **principal_type** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = 'User')
 >
 >> Optional; The [principal type](https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#principaltype).
 >
-> **workspace** (str, default=None)
+> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None)
 >
 >> Optional; The name of the workspace.
 Defaults to None which resolves to the workspace of the attached lakehouse
@@ -2375,68 +2049,60 @@ or if no lakehouse attached, resolves to the workspace of the notebook.
 ```python
 import sempy_labs as labs
-labs.vertipaq_analyzer(
-    dataset='AdvWorks',
-    workspace=None, # This parameter is optional
-    export=None, # This parameter is optional
-    read_stats_from_data=False, # This parameter is optional
-)
+labs.vertipaq_analyzer(dataset='AdvWorks', workspace=None, export=None, read_stats_from_data=False)
 ```
 ### Parameters
-> **dataset** (str)
+> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str))
 >
 >> Required; Name of the semantic model.
 >
-> **workspace** (str, default=None)
+> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None)
 >
 >> Optional; The Fabric workspace name in which the semantic model exists.
 Defaults to None which resolves to the workspace of the attached lakehouse
 or if no lakehouse attached, resolves to the workspace of the notebook.
 >
-> **export** (str, default=None)
+> **export** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None)
 >
 >> Optional; Specifying 'zip' will export the results to a zip file in your lakehouse (which can be imported using the import_vertipaq_analyzer function.
 Specifying 'table' will export the results to delta tables (appended) in your lakehouse.
 Default value: None.
 >
-> **read_stats_from_data** (bool, default=False)
+> **read_stats_from_data** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False)
 >
 >> Optional; Setting this parameter to true has the function get Column Cardinality and Missing Rows using DAX (Direct Lake semantic models achieve this using a Spark query to the lakehouse).
 >
+> **kwargs** (**kwargs)
+>
+>> Required; None
+>
 ### [add_table_to_direct_lake_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.add_table_to_direct_lake_semantic_model)
 #### Adds a table and all of its columns to a Direct Lake semantic model, based on a Fabric lakehouse table.
 ```python
 import sempy_labs as labs
-import sempy_labs.directlake as directlake
-directlake.add_table_to_direct_lake_semantic_model(
-    dataset='AdvWorks',
-    table_name='',
-    lakehouse_table_name='',
-    refresh=True, # This parameter is optional
-    workspace=None, # This parameter is optional
-)
+directlake.add_table_to_direct_lake_semantic_model(dataset='AdvWorks', table_name='', lakehouse_table_name='', refresh=True, workspace=None)
 ```
 ### Parameters
-> **dataset** (str)
+> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str))
 >
 >> Required; Name of the semantic model.
 >
-> **table_name** (str, List[str])
+> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str))
 >
 >> Required; Name of the table in the semantic model.
 >
-> **lakehouse_table_name** (str)
+> **lakehouse_table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str))
 >
 >> Required; The name of the Fabric lakehouse table.
 >
-> **refresh** (bool, default=True)
+> **refresh** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = True)
 >
 >> Optional; Refreshes the table after it is added to the semantic model.
> -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace in which the semantic model resides. Defaults to None which resolves to the workspace of the attached lakehouse @@ -2447,125 +2113,111 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.check_fallback_reason( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +directlake.check_fallback_reason(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; The tables in the semantic model and their fallback reason. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); The tables in the semantic model and their fallback reason. ### [direct_lake_schema_compare](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.direct_lake_schema_compare) #### Checks that all the tables in a Direct Lake semantic model map to tables in their corresponding lakehouse and that the columns in each table exist. 
```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.direct_lake_schema_compare( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +directlake.direct_lake_schema_compare(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > +> **kwargs** (**kwargs) +> +>> Required; None +> ### [direct_lake_schema_sync](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.direct_lake_schema_sync) #### Shows/adds columns which exist in the lakehouse but do not exist in the semantic model (only for tables in the semantic model). ```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.direct_lake_schema_sync( - dataset='AdvWorks', - workspace=None, # This parameter is optional - add_to_model=False, # This parameter is optional -) +directlake.direct_lake_schema_sync(dataset=, workspace=None, add_to_model=False) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. 
> -> **add_to_model** (bool, default=False) +> **add_to_model** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; If set to True, columns which exist in the lakehouse but do not exist in the semantic model are added to the semantic model. No new tables are added. > +> **kwargs** (**kwargs) +> +>> Required; None +> ### [generate_direct_lake_semantic_model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.generate_direct_lake_semantic_model) #### Dynamically generates a Direct Lake semantic model based on tables in a Fabric lakehouse. ```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.generate_direct_lake_semantic_model( - dataset='AdvWorks', - lakehouse_tables='', - workspace=None, # This parameter is optional - lakehouse=None, # This parameter is optional - lakehouse_workspace=None, # This parameter is optional - overwrite=False, # This parameter is optional - refresh=True, # This parameter is optional -) +directlake.generate_direct_lake_semantic_model(dataset=, lakehouse_tables=, workspace=None, lakehouse=None, lakehouse_workspace=None, overwrite=False, refresh=True) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model to be created. > -> **lakehouse_tables** (str | List[str]) +> **lakehouse_tables** (Union[[str](https://docs.python.org/3/library/stdtypes.html#str), List[[str](https://docs.python.org/3/library/stdtypes.html#str)]]) > >> Required; The table(s) within the Fabric lakehouse to add to the semantic model. All columns from these tables will be added to the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the semantic model will reside. 
Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The lakehouse which stores the delta tables which will feed the Direct Lake semantic model. Defaults to None which resolves to the attached lakehouse. > -> **lakehouse_workspace** (str, default=None) +> **lakehouse_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace in which the lakehouse resides. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **overwrite** (bool, default=False) +> **overwrite** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; If set to True, overwrites the existing semantic model if it already exists. > -> **refresh** (bool, default=True) +> **refresh** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = True) > >> Optional; If True, refreshes the newly created semantic model after it is created. > @@ -2574,75 +2226,64 @@ or if no lakehouse attached, resolves to the workspace of the notebook. based on Microsoft's [online documentation](https://learn.microsoft.com/power-bi/enterprise/directlake-overview). ```python import sempy_labs as labs -import sempy_labs.directlake as directlake directlake.get_direct_lake_guardrails() ``` ### Returns -> pandas.DataFrame; A table showing the Direct Lake guardrails by SKU. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A table showing the Direct Lake guardrails by SKU. 
### [get_direct_lake_lakehouse](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_direct_lake_lakehouse) #### Identifies the lakehouse used by a Direct Lake semantic model. ```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.get_direct_lake_lakehouse( - dataset='AdvWorks', - workspace=None, # This parameter is optional - lakehouse=None, # This parameter is optional - lakehouse_workspace=None, # This parameter is optional -) +directlake.get_direct_lake_lakehouse(dataset=, workspace=None, lakehouse=None, lakehouse_workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric lakehouse used by the Direct Lake semantic model. Defaults to None which resolves to the lakehouse attached to the notebook. > -> **lakehouse_workspace** (str, default=None) +> **lakehouse_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace used by the lakehouse. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> str, uuid.UUID; The lakehouse name and lakehouse ID. +> [str](https://docs.python.org/3/library/stdtypes.html#str), uuid.UUID; The lakehouse name and lakehouse ID. 
### [get_direct_lake_source](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_direct_lake_source) #### Obtains the source information for a direct lake semantic model. ```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.get_direct_lake_source( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +directlake.get_direct_lake_source(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> Tuple[str, str, UUID, UUID]; If the source of the direct lake semantic model is a lakehouse this will return: 'Lakehouse', Lakehouse Name, SQL Endpoint Id, Workspace Id +> Tuple[[str](https://docs.python.org/3/library/stdtypes.html#str), [str](https://docs.python.org/3/library/stdtypes.html#str), UUID, UUID]; If the source of the direct lake semantic model is a lakehouse this will return: 'Lakehouse', Lakehouse Name, SQL Endpoint Id, Workspace Id If the source of the direct lake semantic model is a warehouse this will return: 'Warehouse', Warehouse Name, Warehouse Id, Workspace Id If the semantic model is not a Direct Lake semantic model, it will return None, None, None. 
### [get_directlake_guardrails_for_sku](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_directlake_guardrails_for_sku) @@ -2650,146 +2291,122 @@ If the semantic model is not a Direct Lake semantic model, it will return None, * Use the result of the 'get_sku_size' function as an input for this function's sku_size parameter.* ```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.get_directlake_guardrails_for_sku( - sku_size='', -) +directlake.get_directlake_guardrails_for_sku(sku_size=) ``` ### Parameters -> **sku_size** (str) +> **sku_size** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Sku size of a workspace/capacity > ### Returns -> pandas.DataFrame; A table showing the Direct Lake guardrails for the given SKU. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A table showing the Direct Lake guardrails for the given SKU. ### [get_shared_expression](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_shared_expression) #### Dynamically generates the M expression used by a Direct Lake model for a given lakehouse. ```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.get_shared_expression( - lakehouse=None, # This parameter is optional - workspace=None, # This parameter is optional -) +directlake.get_shared_expression(lakehouse=None, workspace=None) ``` ### Parameters -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric lakehouse used by the Direct Lake semantic model. Defaults to None which resolves to the lakehouse attached to the notebook. 
> -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace used by the lakehouse. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> str; Shows the expression which can be used to connect a Direct Lake semantic model to its SQL Endpoint. +> [str](https://docs.python.org/3/library/stdtypes.html#str); Shows the expression which can be used to connect a Direct Lake semantic model to its SQL Endpoint. ### [get_sku_size](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.get_sku_size) #### Shows the SKU size for a workspace. ```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.get_sku_size( - workspace=None, # This parameter is optional -) +directlake.get_sku_size(workspace=None) ``` ### Parameters -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> str; The SKU size for a workspace. +> [str](https://docs.python.org/3/library/stdtypes.html#str); The SKU size for a workspace. ### [list_direct_lake_model_calc_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.list_direct_lake_model_calc_tables) #### Shows the calculated tables and their respective DAX expression for a Direct Lake model (which has been migrated from import/DirectQuery). 
```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.list_direct_lake_model_calc_tables( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +directlake.list_direct_lake_model_calc_tables(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing the calculated tables which were migrated to Direct Lake and whose DAX expressions are stored as model annotations. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing the calculated tables which were migrated to Direct Lake and whose DAX expressions are stored as model annotations. ### [show_unsupported_direct_lake_objects](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.show_unsupported_direct_lake_objects) -#### Returns a list of a semantic model's objects which are not supported by Direct Lake based on -[official documentation](https://learn.microsoft.com/power-bi/enterprise/directlake-overview#known-issues-and-limitations). +#### Returns a list of a semantic model's objects which are not supported by Direct Lake based on [official documentation](https://learn.microsoft.com/power-bi/enterprise/directlake-overview#known-issues-and-limitations). 
+ ```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.show_unsupported_direct_lake_objects( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +directlake.show_unsupported_direct_lake_objects(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame, pandas.DataFrame, pandas.DataFrame; 3 pandas dataframes showing objects in a semantic model which are not supported by Direct Lake. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame), [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame), [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); 3 pandas dataframes showing objects in a semantic model which are not supported by Direct Lake. ### [update_direct_lake_model_lakehouse_connection](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.update_direct_lake_model_lakehouse_connection) #### Remaps a Direct Lake semantic model's SQL Endpoint connection to a new lakehouse. 
```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.update_direct_lake_model_lakehouse_connection( - dataset='AdvWorks', - workspace=None, # This parameter is optional - lakehouse=None, # This parameter is optional - lakehouse_workspace=None, # This parameter is optional -) +directlake.update_direct_lake_model_lakehouse_connection(dataset=, workspace=None, lakehouse=None, lakehouse_workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the semantic model exists. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric lakehouse used by the Direct Lake semantic model. Defaults to None which resolves to the lakehouse attached to the notebook. > -> **lakehouse_workspace** (str, default=None) +> **lakehouse_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace used by the lakehouse. Defaults to None which resolves to the workspace of the attached lakehouse @@ -2800,134 +2417,114 @@ or if no lakehouse attached, resolves to the workspace of the notebook. 
```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.update_direct_lake_partition_entity( - dataset='AdvWorks', - table_name='', - entity_name='', - workspace=None, # This parameter is optional -) +directlake.update_direct_lake_partition_entity(dataset=, table_name=, entity_name=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **table_name** (str, List[str]) +> **table_name** (Union[[str](https://docs.python.org/3/library/stdtypes.html#str), List[[str](https://docs.python.org/3/library/stdtypes.html#str)]]) > >> Required; Name of the table(s) in the semantic model. > -> **entity_name** (str, List[str]) +> **entity_name** (Union[[str](https://docs.python.org/3/library/stdtypes.html#str), List[[str](https://docs.python.org/3/library/stdtypes.html#str)]]) > >> Required; Name of the lakehouse table to be mapped to the semantic model table. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the semantic model exists. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > +> **kwargs** (**kwargs) +> +>> Required; None +> ### [warm_direct_lake_cache_isresident](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.warm_direct_lake_cache_isresident) #### Performs a refresh on the semantic model and puts the columns which were in memory prior to the refresh back into memory. 
```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.warm_direct_lake_cache_isresident( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +directlake.warm_direct_lake_cache_isresident(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; Returns a pandas dataframe showing the columns that have been put into memory. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); Returns a pandas dataframe showing the columns that have been put into memory. ### [warm_direct_lake_cache_perspective](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.warm_direct_lake_cache_perspective) #### Warms the cache of a Direct Lake semantic model by running a simple DAX query against the columns in a perspective. ```python import sempy_labs as labs -import sempy_labs.directlake as directlake -directlake.warm_direct_lake_cache_perspective( - dataset='AdvWorks', - perspective='', - add_dependencies=False, # This parameter is optional - workspace=None, # This parameter is optional -) +directlake.warm_direct_lake_cache_perspective(dataset=, perspective=, add_dependencies=False, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. 
> -> **perspective** (str) +> **perspective** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the perspective which contains objects to be used for warming the cache. > -> **add_dependencies** (bool, default=False) +> **add_dependencies** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Includes object dependencies in the cache warming process. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; Returns a pandas dataframe showing the columns that have been put into memory. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); Returns a pandas dataframe showing the columns that have been put into memory. ### [create_shortcut_onelake](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.create_shortcut_onelake) #### Creates a [shortcut](https://learn.microsoft.com/fabric/onelake/onelake-shortcuts) to a delta table in OneLake. ```python import sempy_labs as labs -import sempy_labs.lakehouse as lake -lake.create_shortcut_onelake( - table_name='', - source_lakehouse='', - source_workspace='', - destination_lakehouse='', - destination_workspace=None, # This parameter is optional - shortcut_name=None, # This parameter is optional -) +lake.create_shortcut_onelake(table_name=, source_lakehouse=, source_workspace=, destination_lakehouse=, destination_workspace=None, shortcut_name=None) ``` ### Parameters -> **table_name** (str) +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The table name for which a shortcut will be created. 
> -> **source_lakehouse** (str) +> **source_lakehouse** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The Fabric lakehouse in which the table resides. > -> **source_workspace** (str) +> **source_workspace** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the Fabric workspace in which the source lakehouse exists. > -> **destination_lakehouse** (str) +> **destination_lakehouse** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The Fabric lakehouse in which the shortcut will be created. > -> **destination_workspace** (str, default=None) +> **destination_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace in which the shortcut will be created. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **shortcut_name** (str, default=None) +> **shortcut_name** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the shortcut 'table' to be created. This defaults to the 'table_name' parameter value. > @@ -2936,25 +2533,20 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -import sempy_labs.lakehouse as lake -lake.delete_shortcut( - shortcut_name='', - lakehouse=None, # This parameter is optional - workspace=None, # This parameter is optional -) +lake.delete_shortcut(shortcut_name=, lakehouse=None, workspace=None) ``` ### Parameters -> **shortcut_name** (str) +> **shortcut_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The name of the shortcut. > -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric lakehouse name in which the shortcut resides. 
Defaults to None which resolves to the lakehouse attached to the notebook. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace in which lakehouse resides. Defaults to None which resolves to the workspace of the attached lakehouse @@ -2965,104 +2557,83 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -import sempy_labs.lakehouse as lake -lake.get_lakehouse_columns( - lakehouse=None, # This parameter is optional - workspace=None, # This parameter is optional -) +lake.get_lakehouse_columns(lakehouse=None, workspace=None) ``` ### Parameters -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric lakehouse. Defaults to None which resolves to the lakehouse attached to the notebook. > -> **lakehouse_workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > ->> Optional; The Fabric workspace used by the lakehouse. -Defaults to None which resolves to the workspace of the attached lakehouse -or if no lakehouse attached, resolves to the workspace of the notebook. +>> Optional; None > ### Returns -> pandas.DataFrame; Shows the tables/columns within a lakehouse and their properties. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); Shows the tables/columns within a lakehouse and their properties. ### [get_lakehouse_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables) #### Shows the tables of a lakehouse and their respective properties. Option to include additional properties relevant to Direct Lake guardrails. 
```python import sempy_labs as labs -import sempy_labs.lakehouse as lake -lake.get_lakehouse_tables( - lakehouse=None, # This parameter is optional - workspace=None, # This parameter is optional - extended=False, # This parameter is optional - count_rows=False, # This parameter is optional - export=False, # This parameter is optional -) +lake.get_lakehouse_tables(lakehouse=None, workspace=None, extended=False, count_rows=False, export=False) ``` ### Parameters -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric lakehouse. Defaults to None which resolves to the lakehouse attached to the notebook. > -> **lakehouse_workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > ->> Optional; The Fabric workspace used by the lakehouse. -Defaults to None which resolves to the workspace of the attached lakehouse -or if no lakehouse attached, resolves to the workspace of the notebook. +>> Optional; None > -> **extended** (bool, default=False) +> **extended** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Obtains additional columns relevant to the size of each table. > -> **count_rows** (bool, default=False) +> **count_rows** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Obtains a row count for each lakehouse table. > -> **export** (bool, default=False) +> **export** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Exports the resulting dataframe to a delta table in the lakehouse. > ### Returns -> pandas.DataFrame; Shows the tables/columns within a lakehouse and their properties. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); Shows the tables/columns within a lakehouse and their properties. 
### [lakehouse_attached](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.lakehouse_attached) #### Identifies if a lakehouse is attached to the notebook. ```python import sempy_labs as labs -import sempy_labs.lakehouse as lake lake.lakehouse_attached() ``` ### Returns -> bool; Returns True if a lakehouse is attached to the notebook. +> [bool](https://docs.python.org/3/library/stdtypes.html#bool); Returns True if a lakehouse is attached to the notebook. ### [optimize_lakehouse_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.optimize_lakehouse_tables) #### Runs the [OPTIMIZE](https://docs.delta.io/latest/optimizations-oss.html) function over the specified lakehouse tables. ```python import sempy_labs as labs -import sempy_labs.lakehouse as lake -lake.optimize_lakehouse_tables( - tables=None, # This parameter is optional - lakehouse=None, # This parameter is optional - workspace=None, # This parameter is optional -) +lake.optimize_lakehouse_tables(tables=None, lakehouse=None, workspace=None) ``` ### Parameters -> **tables** (str | List[str], default=None) +> **tables** (Union[[str](https://docs.python.org/3/library/stdtypes.html#str), List[[str](https://docs.python.org/3/library/stdtypes.html#str)], NoneType] = None) > >> Optional; The table(s) to optimize. Defaults to None which resolves to optimizing all tables within the lakehouse. > -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric lakehouse. Defaults to None which resolves to the lakehouse attached to the notebook. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace used by the lakehouse. 
Defaults to None which resolves to the workspace of the attached lakehouse @@ -3073,32 +2644,26 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -import sempy_labs.lakehouse as lake -lake.vacuum_lakehouse_tables( - tables=None, # This parameter is optional - lakehouse=None, # This parameter is optional - workspace=None, # This parameter is optional - retain_n_hours=None, # This parameter is optional -) +lake.vacuum_lakehouse_tables(tables=None, lakehouse=None, workspace=None, retain_n_hours=None) ``` ### Parameters -> **tables** (str | List[str] | None) +> **tables** (Union[[str](https://docs.python.org/3/library/stdtypes.html#str), List[[str](https://docs.python.org/3/library/stdtypes.html#str)], NoneType] = None) > >> Optional; The table(s) to vacuum. If no tables are specified, all tables in the lakehouse will be vacuumed. > -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric lakehouse. Defaults to None which resolves to the lakehouse attached to the notebook. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace used by the lakehouse. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **retain_n_hours** (int, default=None) +> **retain_n_hours** (Optional[int] = None) > >> Optional; The number of hours to retain historical versions of Delta table files. Files older than this retention period will be deleted during the vacuum operation. @@ -3113,26 +2678,21 @@ Dataflows Gen2 has a `limit of 50 tables **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. 
> -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **file_name** (str, default='PowerQueryTemplate') +> **file_name** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = 'PowerQueryTemplate') > >> Optional; The name of the Power Query Template file to be generated. > @@ -3141,44 +2701,36 @@ or if no lakehouse attached, resolves to the workspace of the notebook. The DAX expression encapsulating the calculated table logic is stored in the new Direct Lake semantic model as model annotations. ```python import sempy_labs as labs -import sempy_labs.migration as migration -migration.migrate_calc_tables_to_lakehouse( - dataset='AdvWorks', - new_dataset='', - workspace=None, # This parameter is optional - new_dataset_workspace=None, # This parameter is optional - lakehouse=None, # This parameter is optional - lakehouse_workspace=None, # This parameter is optional -) +migration.migrate_calc_tables_to_lakehouse(dataset=, new_dataset=, workspace=None, new_dataset_workspace=None, lakehouse=None, lakehouse_workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the import/DirectQuery semantic model. > -> **new_dataset** (str) +> **new_dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the Direct Lake semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the import/DirectQuery semantic model exists. 
Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **new_dataset_workspace** (str) +> **new_dataset_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the Direct Lake semantic model will be created. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric lakehouse used by the Direct Lake semantic model. Defaults to None which resolves to the lakehouse attached to the notebook. > -> **lakehouse_workspace** (str, default=None) +> **lakehouse_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace used by the lakehouse. Defaults to None which resolves to the workspace of the attached lakehouse @@ -3189,44 +2741,36 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -import sempy_labs.migration as migration -migration.migrate_calc_tables_to_semantic_model( - dataset='AdvWorks', - new_dataset='', - workspace=None, # This parameter is optional - new_dataset_workspace=None, # This parameter is optional - lakehouse=None, # This parameter is optional - lakehouse_workspace=None, # This parameter is optional -) +migration.migrate_calc_tables_to_semantic_model(dataset=, new_dataset=, workspace=None, new_dataset_workspace=None, lakehouse=None, lakehouse_workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the import/DirectQuery semantic model. 
> -> **new_dataset** (str) +> **new_dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the Direct Lake semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the import/DirectQuery semantic model exists. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **new_dataset_workspace** (str) +> **new_dataset_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the Direct Lake semantic model will be created. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric lakehouse used by the Direct Lake semantic model. Defaults to None which resolves to the lakehouse attached to the notebook. > -> **lakehouse_workspace** (str, default=None) +> **lakehouse_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace used by the lakehouse. Defaults to None which resolves to the workspace of the attached lakehouse @@ -3237,31 +2781,25 @@ or if no lakehouse attached, resolves to the workspace of the notebook. 
```python import sempy_labs as labs -import sempy_labs.migration as migration -migration.migrate_field_parameters( - dataset='AdvWorks', - new_dataset='', - workspace=None, # This parameter is optional - new_dataset_workspace=None, # This parameter is optional -) +migration.migrate_field_parameters(dataset=, new_dataset=, workspace=None, new_dataset_workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the import/DirectQuery semantic model. > -> **new_dataset** (str) +> **new_dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the Direct Lake semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the import/DirectQuery semantic model exists. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **new_dataset_workspace** (str) +> **new_dataset_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the Direct Lake semantic model will be created. Defaults to None which resolves to the workspace of the attached lakehouse @@ -3272,31 +2810,25 @@ or if no lakehouse attached, resolves to the workspace of the notebook. 
```python import sempy_labs as labs -import sempy_labs.migration as migration -migration.migrate_model_objects_to_semantic_model( - dataset='AdvWorks', - new_dataset='', - workspace=None, # This parameter is optional - new_dataset_workspace=None, # This parameter is optional -) +migration.migrate_model_objects_to_semantic_model(dataset=, new_dataset=, workspace=None, new_dataset_workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the import/DirectQuery semantic model. > -> **new_dataset** (str) +> **new_dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the Direct Lake semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the import/DirectQuery semantic model exists. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **new_dataset_workspace** (str) +> **new_dataset_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the Direct Lake semantic model will be created. Defaults to None which resolves to the workspace of the attached lakehouse @@ -3307,44 +2839,36 @@ or if no lakehouse attached, resolves to the workspace of the notebook. 
```python import sempy_labs as labs -import sempy_labs.migration as migration -migration.migrate_tables_columns_to_semantic_model( - dataset='AdvWorks', - new_dataset='', - workspace=None, # This parameter is optional - new_dataset_workspace=None, # This parameter is optional - lakehouse=None, # This parameter is optional - lakehouse_workspace=None, # This parameter is optional -) +migration.migrate_tables_columns_to_semantic_model(dataset=, new_dataset=, workspace=None, new_dataset_workspace=None, lakehouse=None, lakehouse_workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the import/DirectQuery semantic model. > -> **new_dataset** (str) +> **new_dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the Direct Lake semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the import/DirectQuery semantic model exists. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **new_dataset_workspace** (str) +> **new_dataset_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the Direct Lake semantic model will be created. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **lakehouse** (str, default=None) +> **lakehouse** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric lakehouse used by the Direct Lake semantic model. Defaults to None which resolves to the lakehouse attached to the notebook. 
> -> **lakehouse_workspace** (str, default=None) +> **lakehouse_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace used by the lakehouse. Defaults to None which resolves to the workspace of the attached lakehouse @@ -3355,56 +2879,46 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -import sempy_labs.migration as migration -migration.migration_validation( - dataset='AdvWorks', - new_dataset='', - workspace=None, # This parameter is optional - new_dataset_workspace=None, # This parameter is optional -) +migration.migration_validation(dataset=, new_dataset=, workspace=None, new_dataset_workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the import/DirectQuery semantic model. > -> **new_dataset** (str) +> **new_dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the Direct Lake semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the import/DirectQuery semantic model exists. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **new_dataset_workspace** (str) +> **new_dataset_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the Direct Lake semantic model will be created. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; A pandas dataframe showing a list of objects and whether they were successfully migrated. 
Also shows the % of objects which were migrated successfully. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); A pandas dataframe showing a list of objects and whether they were successfully migrated. Also shows the % of objects which were migrated successfully. ### [refresh_calc_tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.migration.html#sempy_labs.migration.refresh_calc_tables) #### Recreates the delta tables in the lakehouse based on the DAX expressions stored as model annotations in the Direct Lake semantic model. ```python import sempy_labs as labs -import sempy_labs.migration as migration -migration.refresh_calc_tables( - dataset='AdvWorks', - workspace=None, # This parameter is optional -) +migration.refresh_calc_tables(dataset=, workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -3415,44 +2929,36 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -import sempy_labs.report as rep -rep.clone_report( - report='', - cloned_report='', - workspace=None, # This parameter is optional - target_workspace=None, # This parameter is optional - target_dataset=None, # This parameter is optional - target_dataset_workspace=None, # This parameter is optional -) +rep.clone_report(report=, cloned_report=, workspace=None, target_workspace=None, target_dataset=None, target_dataset_workspace=None) ``` ### Parameters -> **report** (str) +> **report** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the Power BI report. 
> -> **cloned_report** (str) +> **cloned_report** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the new Power BI report. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **target_workspace** (str, default=None) +> **target_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace to place the cloned report. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **target_dataset** (str, default=None) +> **target_dataset** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the semantic model to be used by the cloned report. Defaults to None which resolves to the semantic model used by the initial report. > -> **target_dataset_workspace** (str, default=None) +> **target_dataset_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The workspace in which the semantic model to be used by the report resides. Defaults to None which resolves to the semantic model used by the initial report. 
@@ -3462,26 +2968,21 @@ Defaults to None which resolves to the semantic model used by the initial report ```python import sempy_labs as labs -import sempy_labs.report as rep -rep.create_model_bpa_report( - report='ModelBPA', # This parameter is optional - dataset='ModelBPA', # This parameter is optional - dataset_workspace=None, # This parameter is optional -) +rep.create_model_bpa_report(report=ModelBPA, dataset=AdvWorks, dataset_workspace=None) ``` ### Parameters -> **report** (str, default='ModelBPA') +> **report** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = 'ModelBPA') > >> Optional; Name of the report. Defaults to 'ModelBPA'. > -> **dataset** (str, default='ModelBPA') +> **dataset** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = 'ModelBPA') > >> Optional; Name of the semantic model which feeds this report. Defaults to 'ModelBPA' > -> **dataset_workspace** (str, default=None) +> **dataset_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the semantic model resides. Defaults to None which resolves to the workspace of the attached lakehouse @@ -3492,34 +2993,27 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -import sempy_labs.report as rep -rep.create_report_from_reportjson( - report='', - dataset='AdvWorks', - report_json='', - theme_json=None, # This parameter is optional - workspace=None, # This parameter is optional -) +rep.create_report_from_reportjson(report=, dataset=, report_json=, theme_json=None, workspace=None) ``` ### Parameters -> **report** (str) +> **report** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the report. > -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model to connect to the report. 
> -> **report_json** (dict) +> **report_json** ([dict](https://docs.python.org/3/library/typing.html#typing.Dict)) > >> Required; The report.json file to be used to create the report. > -> **theme_json** (dict, default=None) +> **theme_json** (Optional[[dict](https://docs.python.org/3/library/typing.html#typing.Dict)] = None) > >> Optional; The theme.json file to be used for the theme of the report. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -3530,50 +3024,40 @@ or if no lakehouse attached, resolves to the workspace of the notebook. ```python import sempy_labs as labs -import sempy_labs.report as rep -rep.export_report( - report='', - export_format='', - file_name=None, # This parameter is optional - bookmark_name=None, # This parameter is optional - page_name=None, # This parameter is optional - visual_name=None, # This parameter is optional - report_filter=None, # This parameter is optional - workspace=None, # This parameter is optional -) +rep.export_report(report=, export_format=, file_name=None, bookmark_name=None, page_name=None, visual_name=None, report_filter=None, workspace=None) ``` ### Parameters -> **report** (str) +> **report** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the Power BI report. > -> **export_format** (str) +> **export_format** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The format in which to export the report. For image formats, enter the file extension in this parameter, not 'IMAGE'. 
[Valid formats](https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group#fileformat) > -> **file_name** (str, default=None) +> **file_name** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the file to be saved within the lakehouse. Do not include the file extension. Defaults to the reportName parameter value. > -> **bookmark_name** (str, default=None) +> **bookmark_name** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name (GUID) of a bookmark within the report. > -> **page_name** (str, default=None) +> **page_name** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name (GUID) of the report page. > -> **visual_name** (str, default=None) +> **visual_name** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name (GUID) of a visual. If you specify this parameter you must also specify the page_name parameter. > -> **report_filter** (str, default=None) +> **report_filter** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A report filter to be applied when exporting the report. Syntax is user-friendly. See above for examples. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse @@ -3584,111 +3068,92 @@ or if no lakehouse attached, resolves to the workspace of the notebook. 
```python import sempy_labs as labs -import sempy_labs.report as rep -rep.get_report_definition( - report='', - workspace=None, # This parameter is optional -) +rep.get_report_definition(report=, workspace=None) ``` ### Parameters -> **report** (str) +> **report** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the report. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the report resides. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> pandas.DataFrame; The collection of report definition files within a pandas dataframe. +> [pandas.DataFrame](http://pandas.pydata.org/pandas-docs/dev/reference/api/pandas.DataFrame.html#pandas.DataFrame); The collection of report definition files within a pandas dataframe. ### [get_report_json](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.get_report_json) #### Gets the report.json file content of a Power BI report. ```python import sempy_labs as labs -import sempy_labs.report as rep -rep.get_report_json( - report='', - workspace=None, # This parameter is optional - save_to_file_name=None, # This parameter is optional -) +rep.get_report_json(report=, workspace=None, save_to_file_name=None) ``` ### Parameters -> **report** (str) +> **report** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the Power BI report. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the report exists. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. 
> -> **save_to_file_name** (str, default=None) +> **save_to_file_name** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Specifying this parameter will save the report.json file to the lakehouse attached to the notebook with the file name of this parameter. > ### Returns -> dict; The report.json file for a given Power BI report. +> [dict](https://docs.python.org/3/library/typing.html#typing.Dict); The report.json file for a given Power BI report. ### [launch_report](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.launch_report) #### Shows a Power BI report within a Fabric notebook. ```python import sempy_labs as labs -import sempy_labs.report as rep -rep.launch_report( - report='', - workspace=None, # This parameter is optional -) +rep.launch_report(report=, workspace=None) ``` ### Parameters -> **report** (str) +> **report** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the Power BI report. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > ### Returns -> str; An embedded Power BI report within the notebook. +> [str](https://docs.python.org/3/library/stdtypes.html#str); An embedded Power BI report within the notebook. ### [report_rebind](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.report_rebind) #### Rebinds a report to a semantic model. 
```python import sempy_labs as labs -import sempy_labs.report as rep -rep.report_rebind( - report='', - dataset='AdvWorks', - report_workspace=None, # This parameter is optional - dataset_workspace=None, # This parameter is optional -) +rep.report_rebind(report=, dataset=, report_workspace=None, dataset_workspace=None) ``` ### Parameters -> **report** (str | List[str]) +> **report** (Union[[str](https://docs.python.org/3/library/stdtypes.html#str), List[[str](https://docs.python.org/3/library/stdtypes.html#str)]]) > >> Required; Name(s) of the Power BI report(s). > -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model. > -> **report_workspace** (str, default=None) +> **report_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace in which the report resides. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **dataset_workspace** (str, default=None) +> **dataset_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace in which the semantic model resides. Defaults to None which resolves to the workspace of the attached lakehouse @@ -3699,38 +3164,29 @@ or if no lakehouse attached, resolves to the workspace of the notebook. 
```python import sempy_labs as labs -import sempy_labs.report as rep -rep.report_rebind_all( - dataset='AdvWorks', - new_dataset='', - dataset_workspace=None, # This parameter is optional - new_dataset_workpace=None, # This parameter is optional - report_workspace=None, # This parameter is optional -) +rep.report_rebind_all(dataset=, new_dataset=, dataset_workspace=None, new_dataset_workpace=None, report_workspace=None) ``` ### Parameters -> **dataset** (str) +> **dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model currently binded to the reports. > -> **new_dataset** (str) +> **new_dataset** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the semantic model to rebind to the reports. > -> **dataset_workspace** (str, default=None) +> **dataset_workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The name of the Fabric workspace in which the original semantic model resides. Defaults to None which resolves to the workspace of the attached lakehouse or if no lakehouse attached, resolves to the workspace of the notebook. > -> **new_dataset_workspace** (str, default=None) +> **new_dataset_workpace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > ->> Optional; The name of the Fabric workspace in which the new semantic model resides. -Defaults to None which resolves to the workspace of the attached lakehouse -or if no lakehouse attached, resolves to the workspace of the notebook. +>> Optional; None > -> **report_workspace** (str | List[str], default=None) +> **report_workspace** (Union[[str](https://docs.python.org/3/library/stdtypes.html#str), List[[str](https://docs.python.org/3/library/stdtypes.html#str)], NoneType] = None) > >> Optional; The name(s) of the Fabric workspace(s) in which the report(s) reside(s). 
Defaults to None which finds all reports in all workspaces which use the semantic model and rebinds them to @@ -3741,24 +3197,19 @@ the new semantic model. ```python import sempy_labs as labs -import sempy_labs.report as rep -rep.update_report_from_reportjson( - report='', - report_json='', - workspace=None, # This parameter is optional -) +rep.update_report_from_reportjson(report=, report_json=, workspace=None) ``` ### Parameters -> **report** (str) +> **report** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the report. > -> **report_json** (dict) +> **report_json** ([dict](https://docs.python.org/3/library/typing.html#typing.Dict)) > >> Required; The report.json file to be used to update the report. > -> **workspace** (str, default=None) +> **workspace** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The Fabric workspace name in which the report resides. Defaults to None which resolves to the workspace of the attached lakehouse @@ -3771,63 +3222,55 @@ or if no lakehouse attached, resolves to the workspace of the notebook. 
import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_calculated_column( - table_name='', - column_name='', - expression='', - data_type='', - format_string=None, # This parameter is optional - hidden=False, # This parameter is optional - description=None, # This parameter is optional - display_folder=None, # This parameter is optional - data_category=None, # This parameter is optional - key=False, # This parameter is optional - summarize_by=None, # This parameter is optional -) +tom.add_calculated_column(table_name=, column_name=, expression=, data_type=, format_string=None, hidden=False, description=None, display_folder=None, data_category=None, key=False, summarize_by=None) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table in which the column will be created. > -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column. > -> **expression** (str) +> **expression** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The DAX expression for the column. > -> **data_type** (str) +> **data_type** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The data type of the column. > -> **format_string** (str, default=None) +> **format_string** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Format string of the column. > -> **hidden** (bool, default=False) +> **hidden** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Whether the column will be hidden or visible. 
> -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description of the column. > -> **display_folder** (str, default=None) +> **display_folder** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The display folder in which the column will reside. > -> **data_category** (str, default=None) +> **data_category** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The data category of the column. > -> **key** (bool, default=False) +> **key** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Marks the column as the primary key of the table. > -> **summarize_by** (str, default=None) +> **summarize_by** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Sets the value for the Summarize By property of the column. Defaults to None which resolves to 'Default'. @@ -3839,33 +3282,31 @@ Defaults to None which resolves to 'Default'. import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_calculated_table( - name='', - expression='', - description=None, # This parameter is optional - data_category=None, # This parameter is optional - hidden=False, # This parameter is optional -) +tom.add_calculated_table(name=, expression=, description=None, data_category=None, hidden=False) ``` ### Parameters -> **name** (str) +> **self** (self) +> +>> Required; None +> +> **name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **expression** (str) +> **expression** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The DAX expression for the calculated table. 
> -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description of the table. > -> **data_catgegory** (str, default=None) +> **data_category** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > ->> Optional; The data category for the table. +>> Optional; None > -> **hidden** (bool, default=False) +> **hidden** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Whether the table is hidden or visible. > @@ -3876,63 +3317,55 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_calculated_table_column( - table_name='', - column_name='', - source_column='', - data_type='', - format_string=None, # This parameter is optional - hidden=False, # This parameter is optional - description=None, # This parameter is optional - display_folder=None, # This parameter is optional - data_category=None, # This parameter is optional - key=False, # This parameter is optional - summarize_by=None, # This parameter is optional -) +tom.add_calculated_table_column(table_name=, column_name=, source_column=, data_type=, format_string=None, hidden=False, description=None, display_folder=None, data_category=None, key=False, summarize_by=None) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table in which the column will be created. > -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column. 
> -> **source_column** (str) +> **source_column** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The source column for the column. > -> **data_type** (str) +> **data_type** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The data type of the column. > -> **format_string** (str, default=None) +> **format_string** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Format string of the column. > -> **hidden** (bool, default=False) +> **hidden** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Whether the column will be hidden or visible. > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description of the column. > -> **display_folder** (str, default=None) +> **display_folder** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The display folder in which the column will reside. > -> **data_category** (str, default=None) +> **data_category** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The data category of the column. > -> **key** (bool, default=False) +> **key** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Marks the column as the primary key of the table. > -> **summarize_by** (str, default=None) +> **summarize_by** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Sets the value for the Summarize By property of the column. Defaults to None resolves to 'Default'. @@ -3944,16 +3377,15 @@ Defaults to None resolves to 'Default'. 
import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_calculation_group( - name='', - precedence='', - description=None, # This parameter is optional - hidden=False, # This parameter is optional -) +tom.add_calculation_group(name=, precedence=, description=None, hidden=False) ``` ### Parameters -> **name** (str) +> **self** (self) +> +>> Required; None +> +> **name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the calculation group. > @@ -3961,11 +3393,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: > >> Required; The precedence of the calculation group. > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description of the calculation group. > -> **hidden** (bool, default=False) +> **hidden** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Whether the calculation group is hidden/visible. 
> @@ -3976,40 +3408,37 @@ a [calculation group](https://learn.microsoft.com/dotnet/api/microsoft.analysiss import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_calculation_item( - table_name='', - calculation_item_name='', - expression='', - ordinal=None, # This parameter is optional - description=None, # This parameter is optional - format_string_expression=None, # This parameter is optional -) +tom.add_calculation_item(table_name=, calculation_item_name=, expression=, ordinal=None, description=None, format_string_expression=None) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table in which the calculation item will be created. > -> **calculation_item_name** (str) +> **calculation_item_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the calculation item. > -> **expression** (str) +> **expression** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The DAX expression for the calculation item. > -> **ordinal** (int, default=None) +> **ordinal** (Optional[int] = None) > >> Optional; The ordinal of the calculation item. > -> **format_string_expression** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > ->> Optional; The format string expression for the calculation item. +>> Optional; A description of the calculation item. > -> **description** (str, default=None) +> **format_string_expression** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > ->> Optional; A description of the calculation item. +>> Optional; The format string expression for the calculation item. 
> ### [add_data_column](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_data_column) #### Adds a data column to a table within a semantic model. @@ -4018,63 +3447,55 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_data_column( - table_name='', - column_name='', - source_column='', - data_type='', - format_string=None, # This parameter is optional - hidden=False, # This parameter is optional - description=None, # This parameter is optional - display_folder=None, # This parameter is optional - data_category=None, # This parameter is optional - key=False, # This parameter is optional - summarize_by=None, # This parameter is optional -) +tom.add_data_column(table_name=, column_name=, source_column=, data_type=, format_string=None, hidden=False, description=None, display_folder=None, data_category=None, key=False, summarize_by=None) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table in which the column will be created. > -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column. > -> **source_column** (str) +> **source_column** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The source column for the column. > -> **data_type** (str) +> **data_type** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The data type of the column. > -> **format_string** (str, default=None) +> **format_string** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Format string of the column. 
> -> **hidden** (bool, default=False) +> **hidden** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Whether the column will be hidden or visible. > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description of the column. > -> **display_folder** (str, default=None) +> **display_folder** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The display folder in which the column will reside. > -> **data_category** (str, default=None) +> **data_category** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The data category of the column. > -> **key** (bool, default=False) +> **key** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Marks the column as the primary key of the table. > -> **summarize_by** (str, default=None) +> **summarize_by** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Sets the value for the Summarize By property of the column. Defaults to None resolves to 'Default'. @@ -4086,29 +3507,28 @@ Defaults to None resolves to 'Default'. import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_entity_partition( - table_name='', - entity_name='', - expression=None, # This parameter is optional - description=None, # This parameter is optional -) +tom.add_entity_partition(table_name=, entity_name=, expression=None, description=None) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. 
> -> **entity_name** (str) +> **entity_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the lakehouse table. > -> **expression** (TOM Object, default=None) +> **expression** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The expression used by the table. Defaults to None which resolves to the 'DatabaseQuery' expression. > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description for the partition. > @@ -4119,23 +3539,23 @@ Defaults to None which resolves to the 'DatabaseQuery' expression. import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_expression( - name='', - expression='', - description=None, # This parameter is optional -) +tom.add_expression(name=, expression=, description=None) ``` ### Parameters -> **name** (str) +> **self** (self) +> +>> Required; None +> +> **name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the expression. > -> **expression** (str) +> **expression** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The M expression of the expression. > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description of the expression. 
> @@ -4146,25 +3566,25 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_field_parameter( - table_name='', - objects='', - object_names=None, # This parameter is optional -) +tom.add_field_parameter(table_name=, objects=, object_names=None) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **objects** (List[str]) +> **objects** (List[[str](https://docs.python.org/3/library/stdtypes.html#str)]) > >> Required; The columns/measures to be included in the field parameter. Columns must be specified as such : 'Table Name'[Column Name]. Measures may be formatted as '[Measure Name]' or 'Measure Name'. > -> **object_names** (List[str], default=None) +> **object_names** (List[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The corresponding visible name for the measures/columns in the objects list. Defaults to None which shows the measure/column name. @@ -4176,38 +3596,35 @@ Defaults to None which shows the measure/column name. 
import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_hierarchy( - table_name='', - hierarchy_name='', - columns='', - levels=None, # This parameter is optional - hierarchy_description=None, # This parameter is optional - hierarchy_hidden=False, # This parameter is optional -) +tom.add_hierarchy(table_name=, hierarchy_name=, columns=, levels=None, hierarchy_description=None, hierarchy_hidden=False) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **hierarchy_name** (str) +> **hierarchy_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the hierarchy. > -> **columns** (List[str]) +> **columns** (List[[str](https://docs.python.org/3/library/stdtypes.html#str)]) > >> Required; Names of the columns to use within the hierarchy. > -> **levels** (List[str], default=None) +> **levels** (Optional[List[[str](https://docs.python.org/3/library/stdtypes.html#str)]] = None) > >> Optional; Names of the levels to use within the hierarhcy (instead of the column names). > -> **hierarchy_description** (str, default=None) +> **hierarchy_description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description of the hierarchy. > -> **hierarchy_hidden** (bool, default=False) +> **hierarchy_hidden** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Whether the hierarchy is visible or hidden. 
> @@ -4218,38 +3635,31 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_incremental_refresh_policy( - table_name='', - column_name='', - start_date='', - end_date='', - incremental_granularity='', - incremental_periods='', - rolling_window_granularity='', - rolling_window_periods='', - only_refresh_complete_days=False, # This parameter is optional - detect_data_changes_column=None, # This parameter is optional -) +tom.add_incremental_refresh_policy(table_name=, column_name=, start_date=, end_date=, incremental_granularity=, incremental_periods=, rolling_window_granularity=, rolling_window_periods=, only_refresh_complete_days=False, detect_data_changes_column=None) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The DateTime column to be used for the RangeStart and RangeEnd parameters. > -> **start_date** (str) +> **start_date** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The date to be used for the RangeStart parameter. > -> **end_date** (str) +> **end_date** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The date to be used for the RangeEnd parameter. > -> **incremental_granularity** (str) +> **incremental_granularity** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Granularity of the (most recent) incremental refresh range. > @@ -4257,7 +3667,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: > >> Required; Number of periods for the incremental refresh range. 
> -> **rolling_window_granularity** (str) +> **rolling_window_granularity** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Target granularity of the rolling window for the whole semantic model. > @@ -4265,11 +3675,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: > >> Required; Number of periods for the rolling window for the whole semantic model. > -> **only_refresh_complete_days** (bool, default=False) +> **only_refresh_complete_days** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Lag or leading periods from Now() to the rolling window head. > -> **detect_data_changes_column** (str, default=None) +> **detect_data_changes_column** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The column to use for detecting data changes. Defaults to None which resolves to not detecting data changes. @@ -4281,35 +3691,33 @@ Defaults to None which resolves to not detecting data changes. import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_m_partition( - table_name='', - partition_name='', - expression='', - mode=None, # This parameter is optional - description=None, # This parameter is optional -) +tom.add_m_partition(table_name=, partition_name=, expression=, mode=None, description=None) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **partition_name** (str) +> **partition_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the partition. > -> **expression** (str) +> **expression** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The M expression encapsulating the logic for the partition. 
> -> **mode** (str, default=None) +> **mode** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The query mode for the partition. Defaults to None which resolves to 'Import'. [Valid mode values](https://learn.microsoft.com/en-us/dotnet/api/microsoft.analysisservices.tabular.modetype?view=analysisservices-dotnet) > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description for the partition. > @@ -4320,48 +3728,43 @@ Defaults to None which resolves to 'Import'. import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_measure( - table_name='', - measure_name='', - expression='', - format_string=None, # This parameter is optional - hidden=False, # This parameter is optional - description=None, # This parameter is optional - display_folder=None, # This parameter is optional - format_string_expression=None, # This parameter is optional -) +tom.add_measure(table_name=, measure_name=, expression=, format_string=None, hidden=False, description=None, display_folder=None, format_string_expression=None) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table in which the measure will be created. > -> **measure_name** (str) +> **measure_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the measure. > -> **expression** (str) +> **expression** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; DAX expression of the measure. > -> **format_string** (str, default=None) +> **format_string** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Format string of the measure. 
> -> **hidden** (bool, default=False) +> **hidden** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Whether the measure will be hidden or visible. > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description of the measure. > -> **display_folder** (str, default=None) +> **display_folder** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The display folder in which the measure will reside. > -> **format_string_expression** (str, default=None) +> **format_string_expression** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The format string expression. > @@ -4372,13 +3775,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_perspective( - perspective_name='', -) +tom.add_perspective(perspective_name=) ``` ### Parameters -> **perspective_name** (str) +> **self** (self) +> +>> Required; None +> +> **perspective_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the perspective. 
> @@ -4389,60 +3794,53 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_relationship( - from_table='', - from_column='', - to_table='', - to_column='', - from_cardinality='', - to_cardinality='', - cross_filtering_behavior=None, # This parameter is optional - is_active=True, # This parameter is optional - security_filtering_behavior=None, # This parameter is optional - rely_on_referential_integrity=False, # This parameter is optional -) +tom.add_relationship(from_table=, from_column=, to_table=, to_column=, from_cardinality=, to_cardinality=, cross_filtering_behavior=None, is_active=True, security_filtering_behavior=None, rely_on_referential_integrity=False) ``` ### Parameters -> **from_table** (str) +> **self** (self) +> +>> Required; None +> +> **from_table** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table on the 'from' side of the relationship. > -> **from_column** (str) +> **from_column** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column on the 'from' side of the relationship. > -> **to_table** (str) +> **to_table** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table on the 'to' side of the relationship. > -> **to_column** (str) +> **to_column** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column on the 'to' side of the relationship. > -> **from_cardinality** (str) +> **from_cardinality** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The cardinality of the 'from' side of the relationship. Options: ['Many', 'One', 'None']. 
> -> **to_cardinality** (str) +> **to_cardinality** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The cardinality of the 'to' side of the relationship. Options: ['Many', 'One', 'None']. > -> **cross_filtering_behavior** (str, default=None) +> **cross_filtering_behavior** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Setting for the cross filtering behavior of the relationship. Options: ('Automatic', 'OneDirection', 'BothDirections'). Defaults to None which resolves to 'Automatic'. > -> **is_active** (bool, default=True) +> **is_active** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = True) > >> Optional; Setting for whether the relationship is active or not. > -> **security_filtering_behavior** (str, default=None) +> **security_filtering_behavior** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Setting for the security filtering behavior of the relationship. Options: ('None', 'OneDirection', 'BothDirections'). Defaults to None which resolves to 'OneDirection'. > -> **rely_on_referential_integrity** (bool, default=False) +> **rely_on_referential_integrity** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Setting for the rely on referential integrity of the relationship. > @@ -4453,24 +3851,24 @@ Defaults to None which resolves to 'OneDirection'. 
import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_role( - role_name='', - model_permission=None, # This parameter is optional - description=None, # This parameter is optional -) +tom.add_role(role_name='', model_permission=None, description=None) ``` ### Parameters -> **role_name** (str) +> **self** (self) +> +>> Required; None +> +> **role_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the role. > -> **model_permission** (str, default=None) +> **model_permission** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The model permission for the role. Defaults to None which resolves to 'Read'. > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description of the role. > @@ -4481,28 +3879,27 @@ Defaults to None which resolves to 'Read'. import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_table( - name='', - description=None, # This parameter is optional - data_category=None, # This parameter is optional - hidden=False, # This parameter is optional -) +tom.add_table(name='', description=None, data_category=None, hidden=False) ``` ### Parameters -> **name** (str) +> **self** (self) +> +>> Required; None +> +> **name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description of the table. 
> -> **data_catgegory** (str, default=None) +> **data_category** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > ->> Optional; The data category for the table. +>> Optional; None > -> **hidden** (bool, default=False) +> **hidden** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Whether the table is hidden or visible. > @@ -4513,23 +3910,23 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_time_intelligence( - measure_name='', - date_table='', - time_intel='', -) +tom.add_time_intelligence(measure_name=, date_table=, time_intel=) ``` ### Parameters -> **measure_name** (str) +> **self** (self) +> +>> Required; None +> +> **measure_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the measure > -> **date_table** (str) +> **date_table** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the date table. > -> **time_intel** (str, List[str]) +> **time_intel** (Union[[str](https://docs.python.org/3/library/stdtypes.html#str), List[[str](https://docs.python.org/3/library/stdtypes.html#str)]]) > >> Required; Time intelligence measures to create (i.e. MTD, YTD, QTD). 
> @@ -4540,18 +3937,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_to_perspective( - object=, - perspective_name='', -) +tom.add_to_perspective(object=, perspective_name='') ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (Union[ForwardRef('TOM.Table'), ForwardRef('TOM.Column'), ForwardRef('TOM.Measure'), ForwardRef('TOM.Hierarchy')]) > >> Required; An object (i.e. table/column/measure) within a semantic model. > -> **perspective_name** (str) +> **perspective_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the perspective. > @@ -4562,13 +3960,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.add_translation( - language='', -) +tom.add_translation(language='') ``` ### Parameters -> **language** (str) +> **self** (self) +> +>> Required; None +> +> **language** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The language code (i.e. 'it-IT' for Italian). 
> @@ -4579,7 +3979,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_calculated_columns() +tom.all_calculated_columns() ``` ### Returns @@ -4591,7 +3991,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_calculated_tables() +tom.all_calculated_tables() ``` ### Returns @@ -4603,7 +4003,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_calculation_groups() +tom.all_calculation_groups() ``` ### Returns @@ -4615,7 +4015,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_calculation_items() +tom.all_calculation_items() ``` ### Returns @@ -4627,7 +4027,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_columns() +tom.all_columns() ``` ### Returns @@ -4639,7 +4039,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_date_tables() +tom.all_date_tables() ``` ### Returns @@ -4651,7 +4051,7 @@ with connect_semantic_model(dataset='', 
workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_hierarchies() +tom.all_hierarchies() ``` ### Returns @@ -4663,7 +4063,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_hybrid_tables() +tom.all_hybrid_tables() ``` ### Returns @@ -4675,7 +4075,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_levels() +tom.all_levels() ``` ### Returns @@ -4687,7 +4087,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_measures() +tom.all_measures() ``` ### Returns @@ -4699,7 +4099,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_partitions() +tom.all_partitions() ``` ### Returns @@ -4711,7 +4111,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.all_rls() +tom.all_rls() ``` ### Returns @@ -4723,28 +4123,27 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with 
connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.apply_refresh_policy( - table_name='', - effective_date=None, # This parameter is optional - refresh=True, # This parameter is optional - max_parallelism=0, # This parameter is optional -) +tom.apply_refresh_policy(table_name='', effective_date=None, refresh=True, max_parallelism=0) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **effective_date** (DateTime, default=None) +> **effective_date** (Optional[datetime.datetime] = None) > >> Optional; The effective date that is used when calculating the partitioning scheme. > -> **refresh** (bool, default=True) +> **refresh** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = True) > >> Optional; An indication if partitions of the table should be refreshed or not; the default behavior is to do the refresh. > -> **max_parallelism** (int, default=0) +> **max_parallelism** (Optional[int] = 0) > >> Optional; The degree of parallelism during the refresh execution. > @@ -4755,13 +4154,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.cardinality( - column=tom.model.Tables["Geography"].Columns["GeographyKey"], -) +tom.cardinality(column=) ``` ### Parameters -> **column** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **column** ('TOM.Column') > >> Required; The column object within the semantic model. 
> @@ -4774,13 +4175,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.clear_annotations( - object=, -) +tom.clear_annotations(object=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (object) > >> Required; An object (i.e. table/column/measure) within a semantic model. > @@ -4791,13 +4194,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.clear_extended_properties( - object=, -) +tom.clear_extended_properties(object=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (object) > >> Required; An object (i.e. table/column/measure) within a semantic model. > @@ -4808,13 +4213,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.data_size( - column=tom.model.Tables["Geography"].Columns["GeographyKey"], -) +tom.data_size(column=) ``` ### Parameters -> **column** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **column** ('TOM.Column') > >> Required; The column object within the semantic model. 
> @@ -4827,18 +4234,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.depends_on( - object=, - dependencies=labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace), -) +tom.depends_on(object=, dependencies=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (object) > >> Required; The TOM object within the semantic model. > -> **dependencies** (pandas.DataFrame) +> **dependencies** (pandas.core.frame.DataFrame) > >> Required; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. > @@ -4851,13 +4259,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.dictionary_size( - column=tom.model.Tables["Geography"].Columns["GeographyKey"], -) +tom.dictionary_size(column=) ``` ### Parameters -> **column** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **column** ('TOM.Column') > >> Required; The column object within the semantic model. 
> @@ -4870,18 +4280,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.fully_qualified_measures( - object=tom.model.Tables["Sales"].Measures["Sales Amount"], - dependencies=labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace), -) +tom.fully_qualified_measures(object=, dependencies=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** ('TOM.Measure') > >> Required; The TOM object within the semantic model. > -> **dependencies** (pandas.DataFrame) +> **dependencies** (pandas.core.frame.DataFrame) > >> Required; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. > @@ -4894,23 +4305,24 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.get_annotation_value( - object=, - name='', -) +tom.get_annotation_value(object=, name=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (object) > >> Required; An object (i.e. table/column/measure) within a semantic model. > -> **name** (str) +> **name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the annotation. > ### Returns -> str; The annotation value. +> [str](https://docs.python.org/3/library/stdtypes.html#str); The annotation value. ### [get_annotations](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.get_annotations) #### Shows all [annotations](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.annotation?view=analysisservices-dotnet) for a given object within a semantic model. 
@@ -4918,13 +4330,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.get_annotations( - object=, -) +tom.get_annotations(object=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (object) > >> Required; An object (i.e. table/column/measure) within a semantic model. > @@ -4937,13 +4351,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.get_extended_properties( - object=, -) +tom.get_extended_properties(object=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (object) > >> Required; An object (i.e. table/column/measure) within a semantic model. > @@ -4956,23 +4372,24 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.get_extended_property_value( - object=, - name='', -) +tom.get_extended_property_value(object=, name=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (object) > >> Required; An object (i.e. table/column/measure) within a semantic model. > -> **name** (str) +> **name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the annotation. > ### Returns -> str; The extended property value. +> [str](https://docs.python.org/3/library/stdtypes.html#str); The extended property value. 
### [has_aggs](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_aggs) #### Identifies if a semantic model has any [aggregations](https://learn.microsoft.com/power-bi/transform-model/aggregations-advanced). @@ -4980,11 +4397,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.has_aggs() +tom.has_aggs() ``` ### Returns -> bool; Indicates if the semantic model has any aggregations. +> [bool](https://docs.python.org/3/library/stdtypes.html#bool); Indicates if the semantic model has any aggregations. ### [has_date_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_date_table) #### Identifies if a semantic model has a table marked as a [date table](https://learn.microsoft.com/power-bi/transform-model/desktop-date-tables). @@ -4992,11 +4409,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.has_date_table() +tom.has_date_table() ``` ### Returns -> bool; Indicates if the semantic model has a table marked as a date table. +> [bool](https://docs.python.org/3/library/stdtypes.html#bool); Indicates if the semantic model has a table marked as a date table. ### [has_hybrid_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_hybrid_table) #### Identifies if a semantic model has a [hybrid table](https://learn.microsoft.com/power-bi/connect-data/service-dataset-modes-understand#hybrid-tables). 
@@ -5004,11 +4421,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.has_hybrid_table() +tom.has_hybrid_table() ``` ### Returns -> bool; Indicates if the semantic model has a hybrid table. +> [bool](https://docs.python.org/3/library/stdtypes.html#bool); Indicates if the semantic model has a hybrid table. ### [has_incremental_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.has_incremental_refresh_policy) #### Identifies whether a table has an [incremental refresh](https://learn.microsoft.com/power-bi/connect-data/incremental-refresh-overview) policy. @@ -5016,18 +4433,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.has_incremental_refresh_policy( - table_name='', -) +tom.has_incremental_refresh_policy(table_name=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > ### Returns -> bool; An indicator whether a table has an incremental refresh policy. +> [bool](https://docs.python.org/3/library/stdtypes.html#bool); An indicator whether a table has an incremental refresh policy. ### [in_perspective](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.in_perspective) #### Indicates whether an object is contained within a given [perspective](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.perspective?view=analysisservices-dotnet). 
@@ -5035,23 +4454,24 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.in_perspective( - object=, - perspective_name='', -) +tom.in_perspective(object=, perspective_name=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (Union[ForwardRef('TOM.Table'), ForwardRef('TOM.Column'), ForwardRef('TOM.Measure'), ForwardRef('TOM.Hierarchy')]) > >> Required; An object (i.e. table/column/measure) within a semantic model. > -> **perspecitve_name** (str) +> **perspective_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > ->> Required; Name of the perspective. +>> Required; None > ### Returns -> bool; An indication as to whether the object is contained within the given perspective. +> [bool](https://docs.python.org/3/library/stdtypes.html#bool); An indication as to whether the object is contained within the given perspective. ### [is_agg_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_agg_table) #### Identifies if a table has [aggregations](https://learn.microsoft.com/power-bi/transform-model/aggregations-advanced). @@ -5059,18 +4479,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.is_agg_table( - table_name='', -) +tom.is_agg_table(table_name=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > ### Returns -> bool; Indicates if the table has any aggregations. 
+> [bool](https://docs.python.org/3/library/stdtypes.html#bool); Indicates if the table has any aggregations. ### [is_auto_date_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_auto_date_table) #### Identifies if a table is an `auto date/time table `_. @@ -5078,18 +4500,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.is_auto_date_table( - table_name='', -) +tom.is_auto_date_table(table_name=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > ### Returns -> bool; Indicates if the table is an auto-date table. +> [bool](https://docs.python.org/3/library/stdtypes.html#bool); Indicates if the table is an auto-date table. ### [is_calculated_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_calculated_table) #### Identifies if a table is a calculated table. @@ -5097,18 +4521,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.is_calculated_table( - table_name='', -) +tom.is_calculated_table(table_name=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > ### Returns -> bool; A boolean value indicating whether the table is a calculated table. +> [bool](https://docs.python.org/3/library/stdtypes.html#bool); A boolean value indicating whether the table is a calculated table. 
### [is_date_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_date_table) #### Identifies if a table is marked as a [date tables](https://learn.microsoft.com/power-bi/transform-model/desktop-date-tables). @@ -5116,18 +4542,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.is_date_table( - table_name='', -) +tom.is_date_table(table_name=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > ### Returns -> bool; Indicates if the table is marked as a date table. +> [bool](https://docs.python.org/3/library/stdtypes.html#bool); Indicates if the table is marked as a date table. ### [is_direct_lake](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_direct_lake) #### Identifies if a semantic model is in [Direct Lake](https://learn.microsoft.com/fabric/get-started/direct-lake-overview) mode. @@ -5135,11 +4563,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.is_direct_lake() +tom.is_direct_lake() ``` ### Returns -> bool; Indicates if the semantic model is in Direct Lake mode. +> [bool](https://docs.python.org/3/library/stdtypes.html#bool); Indicates if the semantic model is in Direct Lake mode. 
### [is_direct_lake_using_view](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_direct_lake_using_view) #### Identifies whether a semantic model is in Direct lake mode and uses views from the lakehouse. @@ -5147,11 +4575,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.is_direct_lake_using_view() +tom.is_direct_lake_using_view() ``` ### Returns -> bool; An indicator whether a semantic model is in Direct lake mode and uses views from the lakehouse. +> [bool](https://docs.python.org/3/library/stdtypes.html#bool); An indicator whether a semantic model is in Direct lake mode and uses views from the lakehouse. ### [is_field_parameter](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_field_parameter) #### Identifies if a table is a [field parameter](https://learn.microsoft.com/power-bi/create-reports/power-bi-field-parameters). @@ -5159,18 +4587,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.is_field_parameter( - table_name='', -) +tom.is_field_parameter(table_name=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > ### Returns -> bool; Indicates if the table is a field parameter. +> [bool](https://docs.python.org/3/library/stdtypes.html#bool); Indicates if the table is a field parameter. 
### [is_hybrid_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.is_hybrid_table) #### Identifies if a table is a [hybrid table](https://learn.microsoft.com/power-bi/connect-data/service-dataset-modes-understand#hybrid-tables). @@ -5178,18 +4608,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.is_hybrid_table( - table_name='', -) +tom.is_hybrid_table(table_name=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > ### Returns -> bool; Indicates if the table is a hybrid table. +> [bool](https://docs.python.org/3/library/stdtypes.html#bool); Indicates if the table is a hybrid table. ### [mark_as_date_table](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.mark_as_date_table) #### Marks a table as a [date table](https://learn.microsoft.com/power-bi/transform-model/desktop-date-tables). @@ -5197,18 +4629,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.mark_as_date_table( - table_name='', - column_name='', -) +tom.mark_as_date_table(table_name=, column_name=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the date column in the table. 
> @@ -5219,13 +4652,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.records_per_segment( - object=tom.model.Tables["Sales"].Partitions["Sales"], -) +tom.records_per_segment(object=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** ('TOM.Partition') > >> Required; The partition object within the semantic model. > @@ -5238,18 +4673,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.referenced_by( - object=, - dependencies=labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace), -) +tom.referenced_by(object=, dependencies=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (object) > >> Required; The TOM object within the semantic model. > -> **dependencies** (pandas.DataFrame) +> **dependencies** (pandas.core.frame.DataFrame) > >> Required; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. > @@ -5262,18 +4698,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.remove_alternate_of( - table_name='', - column_name='', -) +tom.remove_alternate_of(table_name=, column_name=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. 
> -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column. > @@ -5284,18 +4721,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.remove_annotation( - object=, - name='', -) +tom.remove_annotation(object=, name=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (object) > >> Required; An object (i.e. table/column/measure) within a semantic model. > -> **name** (str) +> **name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the annotation. > @@ -5306,18 +4744,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.remove_extended_property( - object=, - name='', -) +tom.remove_extended_property(object=, name=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (object) > >> Required; An object (i.e. table/column/measure) within a semantic model. > -> **name** (str) +> **name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the extended property. 
> @@ -5328,18 +4767,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.remove_from_perspective( - object=, - perspective_name='', -) +tom.remove_from_perspective(object=, perspective_name=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (Union[ForwardRef('TOM.Table'), ForwardRef('TOM.Column'), ForwardRef('TOM.Measure'), ForwardRef('TOM.Hierarchy')]) > >> Required; An object (i.e. table/column/measure) within a semantic model. > -> **perspective_name** (str) +> **perspective_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the perspective. > @@ -5350,13 +4790,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.remove_object( - object=, -) +tom.remove_object(object=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (object) > >> Required; An object (i.e. table/column/measure) within a semantic model. > @@ -5367,18 +4809,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.remove_sort_by_column( - table_name='', - column_name='', -) +tom.remove_sort_by_column(table_name=, column_name=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. 
> -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column. > @@ -5389,18 +4832,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.remove_translation( - object=, - language='', -) +tom.remove_translation(object=, language=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (Union[ForwardRef('TOM.Table'), ForwardRef('TOM.Column'), ForwardRef('TOM.Measure'), ForwardRef('TOM.Hierarchy'), ForwardRef('TOM.Level')]) > >> Required; An object (i.e. table/column/measure) within a semantic model. > -> **language** (str) +> **language** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The language code. > @@ -5410,7 +4854,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.remove_vertipaq_annotations() +tom.remove_vertipaq_annotations() ``` ### [row_count](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.row_count) @@ -5420,13 +4864,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.row_count( - object=tom.model.Tables["Sales"], -) +tom.row_count(object=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (Union[ForwardRef('TOM.Partition'), ForwardRef('TOM.Table')]) > >> Required; The table/partition object within the semantic model. 
> @@ -5439,18 +4885,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_aggregations( - table_name='', - agg_table_name='', -) +tom.set_aggregations(table_name=, agg_table_name=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the base table. > -> **agg_table_name** (str) +> **agg_table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the aggregation table. > @@ -5461,34 +4908,32 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_alternate_of( - table_name='', - column_name='', - summarization_type='', - base_table='', - base_column=None, # This parameter is optional -) +tom.set_alternate_of(table_name=, column_name=, summarization_type=, base_table=, base_column=None) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column. > -> **summarization_type** (str) +> **summarization_type** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The summarization type for the column. 
[Summarization valid values](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.alternateof.summarization?view=analysisservices-dotnet#microsoft-analysisservices-tabular-alternateof-summarization) > -> **base_table** (str) +> **base_table** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the base table for aggregation. > -> **base_column** (str) +> **base_column** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Name of the base column for aggregation > @@ -5499,23 +4944,23 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_annotation( - object=, - name='', - value='', -) +tom.set_annotation(object=, name=, value=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (object) > >> Required; An object (i.e. table/column/measure) within a semantic model. > -> **name** (str) +> **name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the annotation. > -> **value** (str) +> **value** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Value of the annotation. 
> @@ -5526,23 +4971,23 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_data_coverage_definition( - table_name='', - partition_name='', - expression='', -) +tom.set_data_coverage_definition(table_name=, partition_name=, expression=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **partition_name** (str) +> **partition_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the partition. > -> **expression** (str) +> **expression** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; DAX expression containing the logic for the data coverage definition. > @@ -5553,23 +4998,23 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_data_type( - table_name='', - column_name='', - value='', -) +tom.set_data_type(table_name=, column_name=, value=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column. > -> **value** (str) +> **value** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The data type. 
[Data type valid values](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.datatype?view=analysisservices-dotnet) @@ -5581,13 +5026,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_direct_lake_behavior( - direct_lake_behavior='', -) +tom.set_direct_lake_behavior(direct_lake_behavior=) ``` ### Parameters -> **direct_lake_behavior** (str) +> **self** (self) +> +>> Required; None +> +> **direct_lake_behavior** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The DirectLakeBehavior property value. [DirectLakeBehavior valid values](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.directlakebehavior?view=analysisservices-dotnet) @@ -5599,23 +5046,23 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_encoding_hint( - table_name='', - column_name='', - value='', -) +tom.set_encoding_hint(table_name=, column_name=, value=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column. > -> **value** (str) +> **value** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Encoding hint value. 
[Encoding hint valid values](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.encodinghinttype?view=analysisservices-dotnet) @@ -5627,29 +5074,28 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_extended_property( - object=, - extended_property_type='', - name='', - value='', -) +tom.set_extended_property(object=, extended_property_type=, name=, value=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (object) > >> Required; An object (i.e. table/column/measure) within a semantic model. > -> **extended_property_type** (str) +> **extended_property_type** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The extended property type. [Extended property valid values](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.extendedpropertytype?view=analysisservices-dotnet) > -> **name** (str) +> **name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the extended property. > -> **value** (str) +> **value** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Value of the extended property. 
> @@ -5660,23 +5106,23 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_is_available_in_mdx( - table_name='', - column_name='', - value=False, # This parameter is optional -) +tom.set_is_available_in_mdx(table_name=, column_name=, value=False) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column. > -> **value** (bool, default=False) +> **value** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; The IsAvailableInMdx property value. > @@ -5687,24 +5133,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_kpi( - measure_name='', - target='', - lower_bound='', - upper_bound='', - lower_mid_bound=None, # This parameter is optional - upper_mid_bound=None, # This parameter is optional - status_type=None, # This parameter is optional - status_graphic=None, # This parameter is optional -) +tom.set_kpi(measure_name=, target=, lower_bound=, upper_bound=, lower_mid_bound=None, upper_mid_bound=None, status_type=None, status_graphic=None) ``` ### Parameters -> **measure_name** (str) +> **self** (self) +> +>> Required; None +> +> **measure_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the measure. 
> -> **target** (str, int, float) +> **target** (Union[int, float, [str](https://docs.python.org/3/library/stdtypes.html#str)]) > >> Required; The target for the KPI. This can either be a number or the name of a different measure in the semantic model. > @@ -5716,20 +5157,20 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: > >> Required; The lower bound for the KPI. > -> **upper_bound** (float) +> **upper_bound** (float) > >> Required; The upper bound for the KPI. > -> **lower_mid_bound** (float, default=None) +> **lower_mid_bound** (Optional[float] = None) > >> Optional; The lower-mid bound for the KPI. Set this if status_type is 'Centered' or 'CenteredReversed'. > -> **upper_mid_bound** (float, default=None) +> **upper_mid_bound** (Optional[float] = None) > >> Optional; The upper-mid bound for the KPI. Set this if status_type is 'Centered' or 'CenteredReversed'. > -> **status_type** (str, default=None) +> **status_type** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The status type of the KPI. Options: 'Linear', 'LinearReversed', 'Centered', 'CenteredReversed'. Defaults to None which resolves to 'Linear'. > -> **status_graphic** (str, default=None) +> **status_graphic** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The status graphic for the KPI. Defaults to 'Three Circles Colored'. @@ -5741,28 +5182,27 @@ Defaults to 'Three Circles Colored'. import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_ols( - role_name='', - table_name='', - column_name='', - permission='', -) +tom.set_ols(role_name=, table_name=, column_name=, permission=) ``` ### Parameters -> **role_name** (str) +> **self** (self) +> +>> Required; None +> +> **role_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the role. 
> -> **table_name** (str) +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column. > -> **permission** (str) +> **permission** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The object level security permission for the column. [Permission valid values](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.metadatapermission?view=analysisservices-dotnet) @@ -5774,23 +5214,23 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_rls( - role_name='', - table_name='', - filter_expression='', -) +tom.set_rls(role_name=, table_name=, filter_expression=) ``` ### Parameters -> **role_name** (str) +> **self** (self) +> +>> Required; None +> +> **role_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the role. > -> **table_name** (str) +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **filter_expression** (str) +> **filter_expression** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The DAX expression containing the row level security filter expression logic. 
> @@ -5801,23 +5241,23 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_sort_by_column( - table_name='', - column_name='', - sort_by_column='', -) +tom.set_sort_by_column(table_name=, column_name=, sort_by_column=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column. > -> **sort_by_column** (str) +> **sort_by_column** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column to use for sorting. Must be of integer (Int64) data type. > @@ -5828,23 +5268,23 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_summarize_by( - table_name='', - column_name='', - value='Default', # This parameter is optional -) +tom.set_summarize_by(table_name=, column_name=, value=Default) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column. > -> **value** (bool, default=None) +> **value** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = 'Default') > >> Optional; The SummarizeBy property value. Defaults to none which resolves to 'Default'. 
@@ -5857,28 +5297,27 @@ Defaults to none which resolves to 'Default'. import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_translation( - object=tom.model.Tables["Sales"], - language='', - property='', - value='', -) +tom.set_translation(object=, language=, property=, value=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (Union[ForwardRef('TOM.Table'), ForwardRef('TOM.Column'), ForwardRef('TOM.Measure'), ForwardRef('TOM.Hierarchy'), ForwardRef('TOM.Level')]) > >> Required; An object (i.e. table/column/measure) within a semantic model. > -> **language** (str) +> **language** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The language code. > -> **property** (str) +> **property** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The property to set. Options: 'Name', 'Description', 'Display Folder'. > -> **value** (str) +> **value** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; The translation value. 
> @@ -5888,7 +5327,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.set_vertipaq_annotations() +tom.set_vertipaq_annotations() ``` ### [show_incremental_refresh_policy](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.show_incremental_refresh_policy) @@ -5898,13 +5337,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.show_incremental_refresh_policy( - table_name='', -) +tom.show_incremental_refresh_policy(table_name=) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > @@ -5915,13 +5356,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.total_size( - object=tom.model.Tables["Sales"], -) +tom.total_size(object=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (Union[ForwardRef('TOM.Table'), ForwardRef('TOM.Column')]) > >> Required; The table/column object within the semantic model. 
> @@ -5934,18 +5377,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.unqualified_columns( - object=, - dependencies=labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace), -) +tom.unqualified_columns(object=, dependencies=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** ('TOM.Column') > >> Required; The TOM object within the semantic model. > -> **dependencies** (pandas.DataFrame) +> **dependencies** (pandas.core.frame.DataFrame) > >> Required; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. > @@ -5958,41 +5402,38 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.update_calculation_item( - table_name='', - calculation_item_name='', - expression=None, # This parameter is optional - ordinal=None, # This parameter is optional - description=None, # This parameter is optional - format_string_expression=None, # This parameter is optional -) +tom.update_calculation_item(table_name=, calculation_item_name=, expression=None, ordinal=None, description=None, format_string_expression=None) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the calculation group (table). > -> **calculation_item_name** (str) +> **calculation_item_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the calculation item. 
> -> **expression** (str, default=None) +> **expression** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The DAX expression of the calculation item. Defaults to None which keeps the existing setting. > -> **ordinal** (int, default=None) +> **ordinal** (Optional[int] = None) > >> Optional; The ordinal of the calculation item. Defaults to None which keeps the existing setting. > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The description of the role. Defaults to None which keeps the existing setting. > -> **format_string_expression** (str, default=None) +> **format_string_expression** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The format string expression for the calculation item. Defaults to None which keeps the existing setting. @@ -6004,77 +5445,68 @@ Defaults to None which keeps the existing setting. 
import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.update_column( - table_name='', - column_name='', - source_column=None, # This parameter is optional - data_type=None, # This parameter is optional - expression=None, # This parameter is optional - format_string=None, # This parameter is optional - hidden=None, # This parameter is optional - description=None, # This parameter is optional - display_folder=None, # This parameter is optional - data_category=None, # This parameter is optional - key=None, # This parameter is optional - summarize_by=None, # This parameter is optional -) +tom.update_column(table_name=, column_name=, source_column=None, data_type=None, expression=None, format_string=None, hidden=None, description=None, display_folder=None, data_category=None, key=None, summarize_by=None) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table in which the column exists. > -> **column_name** (str) +> **column_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the column. > -> **source_column** (str, default=None) +> **source_column** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The source column for the column (for data columns only). Defaults to None which keeps the existing setting. > -> **data_type** (str, default=None) +> **data_type** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The data type of the column. Defaults to None which keeps the existing setting. > -> **expression** (str, default=None) +> **expression** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The DAX expression of the column (for calculated columns only). 
Defaults to None which keeps the existing setting. > -> **format_string** (str, default=None) +> **format_string** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Format string of the column. Defaults to None which keeps the existing setting. > -> **hidden** (bool, default=None) +> **hidden** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = None) > >> Optional; Whether the column will be hidden or visible. Defaults to None which keeps the existing setting. > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description of the column. Defaults to None which keeps the existing setting. > -> **display_folder** (str, default=None) +> **display_folder** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The display folder in which the column will reside. Defaults to None which keeps the existing setting. > -> **data_category** (str, default=None) +> **data_category** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The data category of the column. Defaults to None which keeps the existing setting. > -> **key** (bool, default=False) +> **key** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = None) > >> Optional; Marks the column as the primary key of the table. Defaults to None which keeps the existing setting. > -> **summarize_by** (str, default=None) +> **summarize_by** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Sets the value for the Summarize By property of the column. Defaults to None which keeps the existing setting. @@ -6086,23 +5518,19 @@ Defaults to None which keeps the existing setting. 
import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.update_incremental_refresh_policy( - table_name='', - incremental_granularity='', - incremental_periods='', - rolling_window_granularity='', - rolling_window_periods='', - only_refresh_complete_days=False, # This parameter is optional - detect_data_changes_column=None, # This parameter is optional -) +tom.update_incremental_refresh_policy(table_name=, incremental_granularity=, incremental_periods=, rolling_window_granularity=, rolling_window_periods=, only_refresh_complete_days=False, detect_data_changes_column=None) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **incremental_granularity** (str) +> **incremental_granularity** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Granularity of the (most recent) incremental refresh range. > @@ -6110,7 +5538,7 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: > >> Required; Number of periods for the incremental refresh range. > -> **rolling_window_granularity** (str) +> **rolling_window_granularity** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Target granularity of the rolling window for the whole semantic model. > @@ -6118,11 +5546,11 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: > >> Required; Number of periods for the rolling window for the whole semantic model. > -> **only_refresh_complete_days** (bool, default=False) +> **only_refresh_complete_days** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = False) > >> Optional; Lag or leading periods from Now() to the rolling window head. 
> -> **detect_data_changes_column** (str, default=None) +> **detect_data_changes_column** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The column to use for detecting data changes. Defaults to None which resolves to not detecting data changes. @@ -6134,35 +5562,33 @@ Defaults to None which resolves to not detecting data changes. import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.update_m_partition( - table_name='', - partition_name='', - expression=None, # This parameter is optional - mode=None, # This parameter is optional - description=None, # This parameter is optional -) +tom.update_m_partition(table_name=, partition_name=, expression=None, mode=None, description=None) ``` ### Parameters -> **table_name** (str) +> **self** (self) +> +>> Required; None +> +> **table_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the table. > -> **partition_name** (str) +> **partition_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the partition. > -> **expression** (str, default=None) +> **expression** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The [M expression](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.mpartitionsource.expression?view=analysisservices-dotnet) containing the logic for the partition. Defaults to None which keeps the existing setting. > -> **mode** (str, default=None) +> **mode** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The query [mode](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.modetype?view=analysisservices-dotnet) of the partition. Defaults to None which keeps the existing setting. 
> -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The description of the partition. Defaults to None which keeps the existing setting. @@ -6174,48 +5600,44 @@ Defaults to None which keeps the existing setting. import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.update_measure( - measure_name='', - expression=None, # This parameter is optional - format_string=None, # This parameter is optional - hidden=None, # This parameter is optional - description=None, # This parameter is optional - display_folder=None, # This parameter is optional - format_string_expression=None, # This parameter is optional -) +tom.update_measure(measure_name=, expression=None, format_string=None, hidden=None, description=None, display_folder=None, format_string_expression=None) ``` ### Parameters -> **measure_name** (str) +> **self** (self) +> +>> Required; None +> +> **measure_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the measure. > -> **expression** (str, default=None) +> **expression** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; DAX expression of the measure. Defaults to None which keeps the existing setting. > -> **format_string** (str, default=None) +> **format_string** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; Format string of the measure. Defaults to None which keeps the existing setting. > -> **hidden** (bool, default=None) +> **hidden** (Optional[[bool](https://docs.python.org/3/library/stdtypes.html#bool)] = None) > >> Optional; Whether the measure will be hidden or visible. Defaults to None which keeps the existing setting. 
> -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; A description of the measure. Defaults to None which keeps the existing setting. > -> **display_folder** (str, default=None) +> **display_folder** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The display folder in which the measure will reside. Defaults to None which keeps the existing setting. > -> **format_string_expression** (str, default=None) +> **format_string_expression** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The format string expression for the calculation item. Defaults to None which keeps the existing setting. @@ -6227,24 +5649,24 @@ Defaults to None which keeps the existing setting. import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.update_role( - role_name='', - model_permission=None, # This parameter is optional - description=None, # This parameter is optional -) +tom.update_role(role_name=, model_permission=None, description=None) ``` ### Parameters -> **role_name** (str) +> **self** (self) +> +>> Required; None +> +> **role_name** ([str](https://docs.python.org/3/library/stdtypes.html#str)) > >> Required; Name of the role. > -> **model_permission** (str, default=None) +> **model_permission** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The model permission for the role. Defaults to None which keeps the existing setting. > -> **description** (str, default=None) +> **description** (Optional[[str](https://docs.python.org/3/library/stdtypes.html#str)] = None) > >> Optional; The description of the role. Defaults to None which keeps the existing setting. @@ -6256,18 +5678,19 @@ Defaults to None which keeps the existing setting. 
import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.used_in_calc_item( - object=, - dependencies=labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace), -) +tom.used_in_calc_item(object=, dependencies=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (Union[ForwardRef('TOM.Table'), ForwardRef('TOM.Column'), ForwardRef('TOM.Measure')]) > >> Required; An object (i.e. table/column) within a semantic model. > -> **dependencies** (pandas.DataFrame) +> **dependencies** (pandas.core.frame.DataFrame) > >> Required; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. > @@ -6280,18 +5703,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.used_in_data_coverage_definition( - object=, - dependencies=labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace), -) +tom.used_in_data_coverage_definition(object=, dependencies=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (Union[ForwardRef('TOM.Table'), ForwardRef('TOM.Column'), ForwardRef('TOM.Measure')]) > >> Required; An object (i.e. table/column) within a semantic model. > -> **dependencies** (pandas.DataFrame) +> **dependencies** (pandas.core.frame.DataFrame) > >> Required; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. 
> @@ -6304,15 +5728,17 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.used_in_hierarchies( - column=tom.model.Tables["Geography"].Columns["GeographyKey"], -) +tom.used_in_hierarchies(column=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) > ->> Required; An column object within a semantic model. +>> Required; None +> +> **column** ('TOM.Column') +> +>> Required; None > ### Returns > Microsoft.AnalysisServices.Tabular.HierarchyCollection; All hierarchies in which the column is used. @@ -6323,15 +5749,17 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.used_in_levels( - column=tom.model.Tables["Geography"].Columns["GeographyKey"], -) +tom.used_in_levels(column=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None > ->> Required; An column object within a semantic model. +> **column** ('TOM.Column') +> +>> Required; None > ### Returns > Microsoft.AnalysisServices.Tabular.LevelCollection; All levels in which the column is used. @@ -6342,13 +5770,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.used_in_relationships( - object=tom.model.Tables["Sales"], -) +tom.used_in_relationships(object=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (Union[ForwardRef('TOM.Table'), ForwardRef('TOM.Column')]) > >> Required; An object (i.e. table/column) within a semantic model. 
> @@ -6361,18 +5791,19 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.used_in_rls( - object=tom.model.Tables["Sales"], - dependencies=labs.get_model_calc_dependencies(dataset=tom._dataset, workspace=tom._workspace), -) +tom.used_in_rls(object=, dependencies=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (Union[ForwardRef('TOM.Table'), ForwardRef('TOM.Column'), ForwardRef('TOM.Measure')]) > >> Required; An object (i.e. table/column) within a semantic model. > -> **dependencies** (pandas.DataFrame) +> **dependencies** (pandas.core.frame.DataFrame) > >> Required; A pandas dataframe with the output of the 'get_model_calc_dependencies' function. > @@ -6385,15 +5816,17 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.used_in_sort_by( - column=tom.model.Tables["Geography"].Columns["GeographyKey"], -) +tom.used_in_sort_by(column=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None > ->> Required; An column object within a semantic model. +> **column** ('TOM.Column') +> +>> Required; None > ### Returns > Microsoft.AnalysisServices.Tabular.ColumnCollection; All columns in which the column is used for sorting. 
@@ -6404,13 +5837,15 @@ with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: import sempy_labs as labs from sempy_labs.tom import connect_semantic_model with connect_semantic_model(dataset='', workspace='', readonly=True) as tom: - tom.used_size( - object=tom.model.Tables["Geography"].Hierarchies["Geo Hierarchy"], -) +tom.used_size(object=) ``` ### Parameters -> **object** (TOM Object) +> **self** (self) +> +>> Required; None +> +> **object** (Union[ForwardRef('TOM.Hierarchy'), ForwardRef('TOM.Relationship')]) > >> Required; The hierarhcy/relationship object within the semantic model. > diff --git a/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py b/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py index 56a9e112..131b2fda 100644 --- a/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py +++ b/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py @@ -11,8 +11,7 @@ def show_unsupported_direct_lake_objects( dataset: str, workspace: Optional[str] = None ) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]: """ - Returns a list of a semantic model's objects which are not supported by Direct Lake based on - `official documentation `_. + Returns a list of a semantic model's objects which are not supported by Direct Lake based on `official documentation `_. 
Parameters ---------- From 108cfa028a7ba098dda7924525c0103b0a5a989f Mon Sep 17 00:00:00 2001 From: Michael Date: Sat, 7 Sep 2024 21:14:10 +0300 Subject: [PATCH 06/18] added index.md within docs --- .github/workflows/_generate_func_examples.py | 5 +- .github/workflows/_generate_readme.py | 2 +- README.md | 462 +++++++++--------- .../function_examples.md | 3 +- docs/index.md | 1 + 5 files changed, 238 insertions(+), 235 deletions(-) rename function_examples.md => docs/function_examples.md (99%) create mode 100644 docs/index.md diff --git a/.github/workflows/_generate_func_examples.py b/.github/workflows/_generate_func_examples.py index dbdaf505..3b1cdaa6 100644 --- a/.github/workflows/_generate_func_examples.py +++ b/.github/workflows/_generate_func_examples.py @@ -71,7 +71,8 @@ def format_docstring_description(description): return re.sub(pattern_desc, r'[\1](\2)', str(description)) -markdown_example = '## Function Examples\n' +markdown_example = '# Welcome to the Semantic Link Labs Function Examples!\n\n' +#markdown_example = '## Function Examples\n' # Gather necessary ingredients into a dictionary func_dict = {} @@ -129,6 +130,6 @@ def format_docstring_description(description): markdown_example += f"\n### Returns\n> {return_type}; {ret.description}" # Write to file -output_path = '/root/semantic-link-labs/function_examples.md' +output_path = '/root/semantic-link-labs/docs/function_examples.md' with open(output_path, 'w') as f: f.write(markdown_example) diff --git a/.github/workflows/_generate_readme.py b/.github/workflows/_generate_readme.py index 7648db16..3815b2db 100644 --- a/.github/workflows/_generate_readme.py +++ b/.github/workflows/_generate_readme.py @@ -161,7 +161,7 @@ else: sorted_category_to_funcs[category] = sorted(category_to_funcs[category]) -prefix = "https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#" +prefix = "https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#" markdown_content += '## 
Function Categories\n' for category, funcs in sorted_category_to_funcs.items(): diff --git a/README.md b/README.md index 21fcf1c7..5aea2941 100644 --- a/README.md +++ b/README.md @@ -57,288 +57,288 @@ An even better way to ensure the semantic-link-labs library is available in your ## Function Categories ### Admin -* [add_user_to_workspace](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_user_to_workspace) -* [assign_workspace_to_capacity](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#assign_workspace_to_capacity) -* [assign_workspace_to_dataflow_storage](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#assign_workspace_to_dataflow_storage) -* [delete_user_from_workspace](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#delete_user_from_workspace) -* [deprovision_workspace_identity](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#deprovision_workspace_identity) -* [list_workspace_role_assignments](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_workspace_role_assignments) -* [list_workspace_users](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_workspace_users) -* [provision_workspace_identity](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#provision_workspace_identity) -* [set_workspace_default_storage_format](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_workspace_default_storage_format) -* [unassign_workspace_from_capacity](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#unassign_workspace_from_capacity) -* [update_workspace_user](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_workspace_user) +* 
[add_user_to_workspace](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_user_to_workspace) +* [assign_workspace_to_capacity](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#assign_workspace_to_capacity) +* [assign_workspace_to_dataflow_storage](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#assign_workspace_to_dataflow_storage) +* [delete_user_from_workspace](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#delete_user_from_workspace) +* [deprovision_workspace_identity](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#deprovision_workspace_identity) +* [list_workspace_role_assignments](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_workspace_role_assignments) +* [list_workspace_users](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_workspace_users) +* [provision_workspace_identity](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#provision_workspace_identity) +* [set_workspace_default_storage_format](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_workspace_default_storage_format) +* [unassign_workspace_from_capacity](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#unassign_workspace_from_capacity) +* [update_workspace_user](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#update_workspace_user) ### Deployment Pipelines -* [list_deployment_pipeline_stage_items](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_deployment_pipeline_stage_items) -* [list_deployment_pipeline_stages](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_deployment_pipeline_stages) -* 
[list_deployment_pipelines](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_deployment_pipelines) +* [list_deployment_pipeline_stage_items](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_deployment_pipeline_stage_items) +* [list_deployment_pipeline_stages](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_deployment_pipeline_stages) +* [list_deployment_pipelines](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_deployment_pipelines) ### Direct Lake -* [add_table_to_direct_lake_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_table_to_direct_lake_semantic_model) -* [check_fallback_reason](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#check_fallback_reason) -* [direct_lake_schema_compare](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#direct_lake_schema_compare) -* [direct_lake_schema_sync](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#direct_lake_schema_sync) -* [generate_direct_lake_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#generate_direct_lake_semantic_model) -* [get_direct_lake_guardrails](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_direct_lake_guardrails) -* [get_direct_lake_lakehouse](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_direct_lake_lakehouse) -* [get_direct_lake_source](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_direct_lake_source) -* [get_direct_lake_sql_endpoint](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_direct_lake_sql_endpoint) -* 
[get_directlake_guardrails_for_sku](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_directlake_guardrails_for_sku) -* [get_shared_expression](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_shared_expression) -* [get_sku_size](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_sku_size) -* [list_direct_lake_model_calc_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_direct_lake_model_calc_tables) -* [show_unsupported_direct_lake_objects](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#show_unsupported_direct_lake_objects) -* [update_direct_lake_model_lakehouse_connection](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_direct_lake_model_lakehouse_connection) -* [update_direct_lake_partition_entity](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_direct_lake_partition_entity) -* [warm_direct_lake_cache_isresident](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#warm_direct_lake_cache_isresident) -* [warm_direct_lake_cache_perspective](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#warm_direct_lake_cache_perspective) +* [add_table_to_direct_lake_semantic_model](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_table_to_direct_lake_semantic_model) +* [check_fallback_reason](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#check_fallback_reason) +* [direct_lake_schema_compare](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#direct_lake_schema_compare) +* [direct_lake_schema_sync](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#direct_lake_schema_sync) +* 
[generate_direct_lake_semantic_model](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#generate_direct_lake_semantic_model) +* [get_direct_lake_guardrails](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_direct_lake_guardrails) +* [get_direct_lake_lakehouse](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_direct_lake_lakehouse) +* [get_direct_lake_source](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_direct_lake_source) +* [get_direct_lake_sql_endpoint](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_direct_lake_sql_endpoint) +* [get_directlake_guardrails_for_sku](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_directlake_guardrails_for_sku) +* [get_shared_expression](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_shared_expression) +* [get_sku_size](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_sku_size) +* [list_direct_lake_model_calc_tables](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_direct_lake_model_calc_tables) +* [show_unsupported_direct_lake_objects](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#show_unsupported_direct_lake_objects) +* [update_direct_lake_model_lakehouse_connection](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#update_direct_lake_model_lakehouse_connection) +* [update_direct_lake_partition_entity](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#update_direct_lake_partition_entity) +* [warm_direct_lake_cache_isresident](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#warm_direct_lake_cache_isresident) +* 
[warm_direct_lake_cache_perspective](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#warm_direct_lake_cache_perspective) ### Direct Lake Migration -* [create_pqt_file](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_pqt_file) -* [migrate_calc_tables_to_lakehouse](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#migrate_calc_tables_to_lakehouse) -* [migrate_calc_tables_to_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#migrate_calc_tables_to_semantic_model) -* [migrate_field_parameters](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#migrate_field_parameters) -* [migrate_model_objects_to_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#migrate_model_objects_to_semantic_model) -* [migrate_tables_columns_to_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#migrate_tables_columns_to_semantic_model) -* [migration_validation](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#migration_validation) -* [refresh_calc_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#refresh_calc_tables) +* [create_pqt_file](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#create_pqt_file) +* [migrate_calc_tables_to_lakehouse](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#migrate_calc_tables_to_lakehouse) +* [migrate_calc_tables_to_semantic_model](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#migrate_calc_tables_to_semantic_model) +* [migrate_field_parameters](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#migrate_field_parameters) +* 
[migrate_model_objects_to_semantic_model](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#migrate_model_objects_to_semantic_model) +* [migrate_tables_columns_to_semantic_model](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#migrate_tables_columns_to_semantic_model) +* [migration_validation](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#migration_validation) +* [refresh_calc_tables](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#refresh_calc_tables) ### General -* [create_warehouse](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_warehouse) -* [get_notebook_definition](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_notebook_definition) -* [import_notebook_from_web](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#import_notebook_from_web) -* [list_capacities](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_capacities) -* [list_dashboards](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_dashboards) -* [list_dataflow_storage_accounts](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_dataflow_storage_accounts) -* [list_dataflows](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_dataflows) -* [list_warehouses](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_warehouses) -* [update_item](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_item) +* [create_warehouse](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#create_warehouse) +* [get_notebook_definition](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_notebook_definition) +* 
[import_notebook_from_web](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#import_notebook_from_web) +* [list_capacities](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_capacities) +* [list_dashboards](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_dashboards) +* [list_dataflow_storage_accounts](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_dataflow_storage_accounts) +* [list_dataflows](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_dataflows) +* [list_warehouses](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_warehouses) +* [update_item](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#update_item) ### Git -* [commit_to_git](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#commit_to_git) -* [connect_workspace_to_git](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#connect_workspace_to_git) -* [disconnect_workspace_from_git](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#disconnect_workspace_from_git) -* [get_git_connection](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_git_connection) -* [get_git_status](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_git_status) -* [initialize_git_connection](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#initialize_git_connection) -* [update_from_git](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_from_git) +* [commit_to_git](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#commit_to_git) +* 
[connect_workspace_to_git](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#connect_workspace_to_git) +* [disconnect_workspace_from_git](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#disconnect_workspace_from_git) +* [get_git_connection](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_git_connection) +* [get_git_status](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_git_status) +* [initialize_git_connection](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#initialize_git_connection) +* [update_from_git](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#update_from_git) ### Helper Functions -* [create_abfss_path](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_abfss_path) -* [create_relationship_name](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_relationship_name) -* [format_dax_object_name](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#format_dax_object_name) -* [generate_embedded_filter](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#generate_embedded_filter) -* [get_capacity_id](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_capacity_id) -* [get_capacity_name](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_capacity_name) -* [resolve_capacity_name](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_capacity_name) -* [resolve_dataset_id](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_dataset_id) -* [resolve_dataset_name](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_dataset_name) -* 
[resolve_item_type](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_item_type) -* [resolve_report_id](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_report_id) -* [resolve_report_name](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_report_name) -* [resolve_workspace_capacity](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_workspace_capacity) -* [save_as_delta_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#save_as_delta_table) +* [create_abfss_path](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#create_abfss_path) +* [create_relationship_name](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#create_relationship_name) +* [format_dax_object_name](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#format_dax_object_name) +* [generate_embedded_filter](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#generate_embedded_filter) +* [get_capacity_id](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_capacity_id) +* [get_capacity_name](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_capacity_name) +* [resolve_capacity_name](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#resolve_capacity_name) +* [resolve_dataset_id](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#resolve_dataset_id) +* [resolve_dataset_name](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#resolve_dataset_name) +* [resolve_item_type](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#resolve_item_type) +* 
[resolve_report_id](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#resolve_report_id) +* [resolve_report_name](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#resolve_report_name) +* [resolve_workspace_capacity](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#resolve_workspace_capacity) +* [save_as_delta_table](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#save_as_delta_table) ### Lakehouse -* [create_shortcut_onelake](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_shortcut_onelake) -* [delete_shortcut](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#delete_shortcut) -* [export_model_to_onelake](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#export_model_to_onelake) -* [get_lakehouse_columns](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_lakehouse_columns) -* [get_lakehouse_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_lakehouse_tables) -* [lakehouse_attached](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#lakehouse_attached) -* [list_lakehouses](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_lakehouses) -* [list_shortcuts](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_shortcuts) -* [optimize_lakehouse_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#optimize_lakehouse_tables) -* [resolve_lakehouse_id](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_lakehouse_id) -* [resolve_lakehouse_name](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#resolve_lakehouse_name) -* 
[vacuum_lakehouse_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#vacuum_lakehouse_tables) +* [create_shortcut_onelake](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#create_shortcut_onelake) +* [delete_shortcut](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#delete_shortcut) +* [export_model_to_onelake](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#export_model_to_onelake) +* [get_lakehouse_columns](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_lakehouse_columns) +* [get_lakehouse_tables](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_lakehouse_tables) +* [lakehouse_attached](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#lakehouse_attached) +* [list_lakehouses](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_lakehouses) +* [list_shortcuts](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_shortcuts) +* [optimize_lakehouse_tables](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#optimize_lakehouse_tables) +* [resolve_lakehouse_id](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#resolve_lakehouse_id) +* [resolve_lakehouse_name](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#resolve_lakehouse_name) +* [vacuum_lakehouse_tables](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#vacuum_lakehouse_tables) ### Model Optimization -* [import_vertipaq_analyzer](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#import_vertipaq_analyzer) -* [model_bpa_rules](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#model_bpa_rules) -* 
[run_model_bpa](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#run_model_bpa) -* [run_model_bpa_bulk](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#run_model_bpa_bulk) -* [vertipaq_analyzer](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#vertipaq_analyzer) +* [import_vertipaq_analyzer](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#import_vertipaq_analyzer) +* [model_bpa_rules](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#model_bpa_rules) +* [run_model_bpa](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#run_model_bpa) +* [run_model_bpa_bulk](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#run_model_bpa_bulk) +* [vertipaq_analyzer](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#vertipaq_analyzer) ### Query Scale Out -* [disable_qso](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#disable_qso) -* [list_qso_settings](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_qso_settings) -* [qso_sync](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#qso_sync) -* [qso_sync_status](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#qso_sync_status) -* [set_qso](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_qso) +* [disable_qso](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#disable_qso) +* [list_qso_settings](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_qso_settings) +* [qso_sync](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#qso_sync) +* 
[qso_sync_status](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#qso_sync_status) +* [set_qso](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_qso) ### Report -* [clone_report](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#clone_report) -* [create_model_bpa_report](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_model_bpa_report) -* [create_report_from_reportjson](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_report_from_reportjson) -* [export_report](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#export_report) -* [get_report_definition](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_report_definition) -* [get_report_json](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_report_json) -* [launch_report](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#launch_report) -* [report_rebind](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#report_rebind) -* [report_rebind_all](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#report_rebind_all) -* [update_report_from_reportjson](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_report_from_reportjson) +* [clone_report](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#clone_report) +* [create_model_bpa_report](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#create_model_bpa_report) +* [create_report_from_reportjson](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#create_report_from_reportjson) +* [export_report](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#export_report) 
+* [get_report_definition](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_report_definition) +* [get_report_json](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_report_json) +* [launch_report](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#launch_report) +* [report_rebind](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#report_rebind) +* [report_rebind_all](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#report_rebind_all) +* [update_report_from_reportjson](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#update_report_from_reportjson) ### Semantic Model -* [backup_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#backup_semantic_model) -* [cancel_dataset_refresh](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#cancel_dataset_refresh) -* [clear_cache](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#clear_cache) -* [copy_semantic_model_backup_file](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#copy_semantic_model_backup_file) -* [create_blank_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_blank_semantic_model) -* [create_model_bpa_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_model_bpa_semantic_model) -* [create_semantic_model_from_bim](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_semantic_model_from_bim) -* [deploy_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#deploy_semantic_model) -* 
[evaluate_dax_impersonation](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#evaluate_dax_impersonation) -* [get_measure_dependencies](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_measure_dependencies) -* [get_model_calc_dependencies](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_model_calc_dependencies) -* [get_object_level_security](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_object_level_security) -* [get_semantic_model_bim](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_semantic_model_bim) -* [is_default_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_default_semantic_model) -* [list_reports_using_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_reports_using_semantic_model) -* [list_semantic_model_objects](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_semantic_model_objects) -* [measure_dependency_tree](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#measure_dependency_tree) -* [refresh_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#refresh_semantic_model) -* [restore_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#restore_semantic_model) -* [set_semantic_model_storage_format](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_semantic_model_storage_format) -* [translate_semantic_model](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#translate_semantic_model) +* [backup_semantic_model](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#backup_semantic_model) +* 
[cancel_dataset_refresh](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#cancel_dataset_refresh) +* [clear_cache](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#clear_cache) +* [copy_semantic_model_backup_file](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#copy_semantic_model_backup_file) +* [create_blank_semantic_model](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#create_blank_semantic_model) +* [create_model_bpa_semantic_model](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#create_model_bpa_semantic_model) +* [create_semantic_model_from_bim](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#create_semantic_model_from_bim) +* [deploy_semantic_model](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#deploy_semantic_model) +* [evaluate_dax_impersonation](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#evaluate_dax_impersonation) +* [get_measure_dependencies](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_measure_dependencies) +* [get_model_calc_dependencies](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_model_calc_dependencies) +* [get_object_level_security](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_object_level_security) +* [get_semantic_model_bim](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_semantic_model_bim) +* [is_default_semantic_model](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#is_default_semantic_model) +* [list_reports_using_semantic_model](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_reports_using_semantic_model) +* 
[list_semantic_model_objects](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_semantic_model_objects) +* [measure_dependency_tree](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#measure_dependency_tree) +* [refresh_semantic_model](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#refresh_semantic_model) +* [restore_semantic_model](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#restore_semantic_model) +* [set_semantic_model_storage_format](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_semantic_model_storage_format) +* [translate_semantic_model](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#translate_semantic_model) ### Spark -* [create_custom_pool](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#create_custom_pool) -* [delete_custom_pool](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#delete_custom_pool) -* [get_spark_settings](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_spark_settings) -* [list_custom_pools](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#list_custom_pools) -* [update_custom_pool](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_custom_pool) -* [update_spark_settings](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_spark_settings) +* [create_custom_pool](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#create_custom_pool) +* [delete_custom_pool](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#delete_custom_pool) +* [get_spark_settings](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_spark_settings) +* 
[list_custom_pools](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#list_custom_pools) +* [update_custom_pool](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#update_custom_pool) +* [update_spark_settings](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#update_spark_settings) ### [Tabular Object Model](https://learn.microsoft.com/analysis-services/tom/introduction-to-the-tabular-object-model-tom-in-analysis-services-amo?view=asallproducts-allversions) ([TOM](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.model?view=analysisservices-dotnet)) #### 'Add' functions -* [add_calculated_column](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_calculated_column) -* [add_calculated_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_calculated_table) -* [add_calculated_table_column](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_calculated_table_column) -* [add_calculation_group](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_calculation_group) -* [add_calculation_item](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_calculation_item) -* [add_data_column](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_data_column) -* [add_entity_partition](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_entity_partition) -* [add_expression](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_expression) -* [add_field_parameter](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_field_parameter) -* [add_hierarchy](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_hierarchy) -* 
[add_m_partition](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_m_partition) -* [add_measure](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_measure) -* [add_relationship](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_relationship) -* [add_role](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_role) -* [add_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_table) -* [add_time_intelligence](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_time_intelligence) +* [add_calculated_column](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_calculated_column) +* [add_calculated_table](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_calculated_table) +* [add_calculated_table_column](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_calculated_table_column) +* [add_calculation_group](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_calculation_group) +* [add_calculation_item](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_calculation_item) +* [add_data_column](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_data_column) +* [add_entity_partition](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_entity_partition) +* [add_expression](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_expression) +* [add_field_parameter](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_field_parameter) +* [add_hierarchy](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_hierarchy) +* 
[add_m_partition](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_m_partition) +* [add_measure](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_measure) +* [add_relationship](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_relationship) +* [add_role](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_role) +* [add_table](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_table) +* [add_time_intelligence](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_time_intelligence) #### 'All' functions -* [all_calculated_columns](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_calculated_columns) -* [all_calculated_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_calculated_tables) -* [all_calculation_groups](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_calculation_groups) -* [all_calculation_items](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_calculation_items) -* [all_columns](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_columns) -* [all_date_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_date_tables) -* [all_hierarchies](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_hierarchies) -* [all_hybrid_tables](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_hybrid_tables) -* [all_levels](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_levels) -* [all_measures](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_measures) -* 
[all_partitions](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_partitions) -* [all_rls](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#all_rls) +* [all_calculated_columns](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#all_calculated_columns) +* [all_calculated_tables](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#all_calculated_tables) +* [all_calculation_groups](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#all_calculation_groups) +* [all_calculation_items](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#all_calculation_items) +* [all_columns](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#all_columns) +* [all_date_tables](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#all_date_tables) +* [all_hierarchies](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#all_hierarchies) +* [all_hybrid_tables](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#all_hybrid_tables) +* [all_levels](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#all_levels) +* [all_measures](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#all_measures) +* [all_partitions](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#all_partitions) +* [all_rls](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#all_rls) #### 'Remove' functions -* [remove_alternate_of](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_alternate_of) -* [remove_object](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_object) -* 
[remove_sort_by_column](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_sort_by_column) +* [remove_alternate_of](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#remove_alternate_of) +* [remove_object](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#remove_object) +* [remove_sort_by_column](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#remove_sort_by_column) #### 'Set' functions -* [set_aggregations](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_aggregations) -* [set_alternate_of](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_alternate_of) -* [set_data_coverage_definition](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_data_coverage_definition) -* [set_data_type](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_data_type) -* [set_direct_lake_behavior](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_direct_lake_behavior) -* [set_encoding_hint](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_encoding_hint) -* [set_is_available_in_mdx](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_is_available_in_mdx) -* [set_kpi](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_kpi) -* [set_ols](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_ols) -* [set_rls](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_rls) -* [set_sort_by_column](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_sort_by_column) -* [set_summarize_by](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_summarize_by) +* 
[set_aggregations](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_aggregations) +* [set_alternate_of](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_alternate_of) +* [set_data_coverage_definition](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_data_coverage_definition) +* [set_data_type](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_data_type) +* [set_direct_lake_behavior](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_direct_lake_behavior) +* [set_encoding_hint](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_encoding_hint) +* [set_is_available_in_mdx](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_is_available_in_mdx) +* [set_kpi](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_kpi) +* [set_ols](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_ols) +* [set_rls](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_rls) +* [set_sort_by_column](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_sort_by_column) +* [set_summarize_by](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_summarize_by) #### 'Update' functions -* [update_calculation_item](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_calculation_item) -* [update_column](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_column) -* [update_m_partition](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_m_partition) -* [update_measure](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_measure) -* 
[update_role](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_role) +* [update_calculation_item](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#update_calculation_item) +* [update_column](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#update_column) +* [update_m_partition](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#update_m_partition) +* [update_measure](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#update_measure) +* [update_role](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#update_role) #### 'Used-in' and dependency functions -* [depends_on](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#depends_on) -* [fully_qualified_measures](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#fully_qualified_measures) -* [referenced_by](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#referenced_by) -* [unqualified_columns](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#unqualified_columns) -* [used_in_calc_item](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_in_calc_item) -* [used_in_data_coverage_definition](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_in_data_coverage_definition) -* [used_in_hierarchies](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_in_hierarchies) -* [used_in_levels](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_in_levels) -* [used_in_relationships](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_in_relationships) -* [used_in_rls](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_in_rls) -* 
[used_in_sort_by](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_in_sort_by) +* [depends_on](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#depends_on) +* [fully_qualified_measures](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#fully_qualified_measures) +* [referenced_by](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#referenced_by) +* [unqualified_columns](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#unqualified_columns) +* [used_in_calc_item](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#used_in_calc_item) +* [used_in_data_coverage_definition](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#used_in_data_coverage_definition) +* [used_in_hierarchies](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#used_in_hierarchies) +* [used_in_levels](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#used_in_levels) +* [used_in_relationships](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#used_in_relationships) +* [used_in_rls](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#used_in_rls) +* [used_in_sort_by](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#used_in_sort_by) #### Annotations -* [clear_annotations](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#clear_annotations) -* [get_annotation_value](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_annotation_value) -* [get_annotations](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_annotations) -* 
[remove_annotation](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_annotation) -* [set_annotation](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_annotation) +* [clear_annotations](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#clear_annotations) +* [get_annotation_value](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_annotation_value) +* [get_annotations](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_annotations) +* [remove_annotation](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#remove_annotation) +* [set_annotation](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_annotation) #### Extended Properties -* [clear_extended_properties](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#clear_extended_properties) -* [get_extended_properties](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_extended_properties) -* [get_extended_property_value](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#get_extended_property_value) -* [remove_extended_property](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_extended_property) -* [set_extended_property](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_extended_property) +* [clear_extended_properties](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#clear_extended_properties) +* [get_extended_properties](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_extended_properties) +* [get_extended_property_value](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#get_extended_property_value) +* 
[remove_extended_property](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#remove_extended_property) +* [set_extended_property](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_extended_property) #### Incremental Refresh -* [add_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_incremental_refresh_policy) -* [apply_refresh_policy](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#apply_refresh_policy) -* [has_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#has_incremental_refresh_policy) -* [show_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#show_incremental_refresh_policy) -* [update_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#update_incremental_refresh_policy) +* [add_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_incremental_refresh_policy) +* [apply_refresh_policy](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#apply_refresh_policy) +* [has_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#has_incremental_refresh_policy) +* [show_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#show_incremental_refresh_policy) +* [update_incremental_refresh_policy](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#update_incremental_refresh_policy) #### Misc functions -* [has_aggs](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#has_aggs) -* 
[has_date_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#has_date_table) -* [has_hybrid_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#has_hybrid_table) -* [is_agg_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_agg_table) -* [is_auto_date_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_auto_date_table) -* [is_calculated_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_calculated_table) -* [is_date_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_date_table) -* [is_direct_lake](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_direct_lake) -* [is_direct_lake_using_view](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_direct_lake_using_view) -* [is_field_parameter](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_field_parameter) -* [is_hybrid_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#is_hybrid_table) -* [mark_as_date_table](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#mark_as_date_table) +* [has_aggs](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#has_aggs) +* [has_date_table](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#has_date_table) +* [has_hybrid_table](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#has_hybrid_table) +* [is_agg_table](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#is_agg_table) +* [is_auto_date_table](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#is_auto_date_table) +* 
[is_calculated_table](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#is_calculated_table) +* [is_date_table](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#is_date_table) +* [is_direct_lake](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#is_direct_lake) +* [is_direct_lake_using_view](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#is_direct_lake_using_view) +* [is_field_parameter](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#is_field_parameter) +* [is_hybrid_table](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#is_hybrid_table) +* [mark_as_date_table](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#mark_as_date_table) #### Perspectives -* [add_perspective](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_perspective) -* [add_to_perspective](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_to_perspective) -* [in_perspective](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#in_perspective) -* [remove_from_perspective](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_from_perspective) +* [add_perspective](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_perspective) +* [add_to_perspective](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_to_perspective) +* [in_perspective](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#in_perspective) +* [remove_from_perspective](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#remove_from_perspective) #### Translations -* 
[add_translation](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#add_translation) -* [remove_translation](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_translation) -* [set_translation](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_translation) +* [add_translation](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#add_translation) +* [remove_translation](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#remove_translation) +* [set_translation](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_translation) #### Vertipaq Stats -* [cardinality](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#cardinality) -* [data_size](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#data_size) -* [dictionary_size](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#dictionary_size) -* [records_per_segment](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#records_per_segment) -* [remove_vertipaq_annotations](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#remove_vertipaq_annotations) -* [row_count](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#row_count) -* [set_vertipaq_annotations](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#set_vertipaq_annotations) -* [total_size](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#total_size) -* [used_size](https://github.com/microsoft/semantic-link-labs/blob/main/function_examples.md#used_size) +* [cardinality](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#cardinality) +* 
[data_size](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#data_size) +* [dictionary_size](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#dictionary_size) +* [records_per_segment](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#records_per_segment) +* [remove_vertipaq_annotations](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#remove_vertipaq_annotations) +* [row_count](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#row_count) +* [set_vertipaq_annotations](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#set_vertipaq_annotations) +* [total_size](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#total_size) +* [used_size](https://github.com/microsoft/semantic-link-labs/tree/main/docs/function_examples.md#used_size) --- diff --git a/function_examples.md b/docs/function_examples.md similarity index 99% rename from function_examples.md rename to docs/function_examples.md index 53eb869d..3f1503e8 100644 --- a/function_examples.md +++ b/docs/function_examples.md @@ -1,4 +1,5 @@ -## Function Examples +# Welcome to the Semantic Link Labs Function Examples! + ### [add_user_to_workspace](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.add_user_to_workspace) #### Adds a user to a workspace. diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 00000000..52c5d394 --- /dev/null +++ b/docs/index.md @@ -0,0 +1 @@ +# Welcome to Semantic Link Labs's pages site! 
\ No newline at end of file From 28df357b80af2ea711a62554788fc1de4b11f292 Mon Sep 17 00:00:00 2001 From: Michael Date: Mon, 9 Sep 2024 11:04:14 +0300 Subject: [PATCH 07/18] added pages.yaml --- .github/workflows/pages.yaml | 40 ++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 .github/workflows/pages.yaml diff --git a/.github/workflows/pages.yaml b/.github/workflows/pages.yaml new file mode 100644 index 00000000..949b87e5 --- /dev/null +++ b/.github/workflows/pages.yaml @@ -0,0 +1,40 @@ +on: [workflow_dispatch] + +jobs: + # Build job + build: + # + # At a minimum this job should upload artifacts using actions/upload-pages-artifact + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Build Example Page + run: python .github/workflows/_generate_func_examples.py + shell: bash + + # Deploy job + deploy: + # Add a dependency to the build job + needs: build + + # Grant GITHUB_TOKEN the permissions required to make a Pages deployment + permissions: + pages: write # to deploy to Pages + id-token: write # to verify the deployment originates from an appropriate source + + # Deploy to the github-pages environment + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + + # Specify runner + deployment step + runs-on: ubuntu-latest + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 # or specific "vX.X.X" version tag for this action \ No newline at end of file From 766ba7f4173b290325e94711c76caa44358cca57 Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 21 Nov 2024 12:29:34 +0200 Subject: [PATCH 08/18] added workflow dispatch --- .github/workflows/pages.yaml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pages.yaml b/.github/workflows/pages.yaml index 949b87e5..30d6cea4 100644 --- a/.github/workflows/pages.yaml +++ 
b/.github/workflows/pages.yaml @@ -1,4 +1,8 @@ -on: [workflow_dispatch] +on: + workflow_dispatch: {} + push: + branches: + - m-kovalsky/dynamicdocumentation jobs: # Build job From 8a23eb6d1926b47110d72f64ba2b2e71468f04b9 Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 21 Nov 2024 12:33:26 +0200 Subject: [PATCH 09/18] added pip installs --- .github/workflows/pages.yaml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pages.yaml b/.github/workflows/pages.yaml index 30d6cea4..552e4d61 100644 --- a/.github/workflows/pages.yaml +++ b/.github/workflows/pages.yaml @@ -18,7 +18,11 @@ jobs: python-version: "3.10" - name: Build Example Page - run: python .github/workflows/_generate_func_examples.py + run: | + pip install docstring_parser + pip install . + python .github/workflows/_generate_func_examples.py + shell: bash # Deploy job From 797cf0ace3f16d1a546d4b3368a67463dbeb9bae Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 21 Nov 2024 12:36:25 +0200 Subject: [PATCH 10/18] added azure-core --- .github/workflows/pages.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pages.yaml b/.github/workflows/pages.yaml index 552e4d61..177e916c 100644 --- a/.github/workflows/pages.yaml +++ b/.github/workflows/pages.yaml @@ -21,6 +21,7 @@ jobs: run: | pip install docstring_parser pip install . 
+ pip install azure-core python .github/workflows/_generate_func_examples.py shell: bash From 8e8ced9fdfea2ab2dd16b2c58bfb0be1934a35e8 Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 21 Nov 2024 12:42:07 +0200 Subject: [PATCH 11/18] azure storage blob --- .github/workflows/_generate_func_examples.py | 3 ++- .github/workflows/pages.yaml | 8 ++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/_generate_func_examples.py b/.github/workflows/_generate_func_examples.py index 3b1cdaa6..6e577417 100644 --- a/.github/workflows/_generate_func_examples.py +++ b/.github/workflows/_generate_func_examples.py @@ -130,6 +130,7 @@ def format_docstring_description(description): markdown_example += f"\n### Returns\n> {return_type}; {ret.description}" # Write to file -output_path = '/root/semantic-link-labs/docs/function_examples.md' +output_path = 'build_outputs_folder/index.md' +# output_path = '/root/semantic-link-labs/docs/function_examples.md' with open(output_path, 'w') as f: f.write(markdown_example) diff --git a/.github/workflows/pages.yaml b/.github/workflows/pages.yaml index 177e916c..75922bbe 100644 --- a/.github/workflows/pages.yaml +++ b/.github/workflows/pages.yaml @@ -21,11 +21,19 @@ jobs: run: | pip install docstring_parser pip install . 
+ md build_output_folder pip install azure-core + pip install azure-storage-blob python .github/workflows/_generate_func_examples.py shell: bash + - name: Upload static files as artifact + id: deployment + uses: actions/upload-pages-artifact@v3 # or specific "vX.X.X" version tag for this action + with: + path: build_outputs_folder/ + # Deploy job deploy: # Add a dependency to the build job From ae9618a905ba9e411e06620b8760c8b68fea8e3c Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 21 Nov 2024 12:44:27 +0200 Subject: [PATCH 12/18] mkdir --- .github/workflows/pages.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pages.yaml b/.github/workflows/pages.yaml index 75922bbe..5d3213de 100644 --- a/.github/workflows/pages.yaml +++ b/.github/workflows/pages.yaml @@ -21,7 +21,7 @@ jobs: run: | pip install docstring_parser pip install . - md build_output_folder + mkdir build_output_folder pip install azure-core pip install azure-storage-blob python .github/workflows/_generate_func_examples.py From 3294d7c57639ac676ab35d46f5467e5b33182be6 Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 21 Nov 2024 12:46:23 +0200 Subject: [PATCH 13/18] build_outputs_folder --- .github/workflows/pages.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pages.yaml b/.github/workflows/pages.yaml index 5d3213de..ac020d4d 100644 --- a/.github/workflows/pages.yaml +++ b/.github/workflows/pages.yaml @@ -21,7 +21,7 @@ jobs: run: | pip install docstring_parser pip install . 
- mkdir build_output_folder + mkdir build_outputs_folder pip install azure-core pip install azure-storage-blob python .github/workflows/_generate_func_examples.py From 932b5cd7b0ebcc5c449d3bae2061963ac0c181ac Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 21 Nov 2024 12:51:17 +0200 Subject: [PATCH 14/18] run --- .github/workflows/pages.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pages.yaml b/.github/workflows/pages.yaml index ac020d4d..d5f1d9a7 100644 --- a/.github/workflows/pages.yaml +++ b/.github/workflows/pages.yaml @@ -17,7 +17,7 @@ jobs: with: python-version: "3.10" - - name: Build Example Page + - name: Semantic Link Labs Documentation run: | pip install docstring_parser pip install . From 161d79fe1ab94e481fd20418f59d5429c1486205 Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 21 Nov 2024 12:55:14 +0200 Subject: [PATCH 15/18] again --- .github/workflows/pages.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pages.yaml b/.github/workflows/pages.yaml index d5f1d9a7..85cf50b5 100644 --- a/.github/workflows/pages.yaml +++ b/.github/workflows/pages.yaml @@ -1,3 +1,4 @@ +name: Semantic Link Labs Documentation on: workflow_dispatch: {} push: @@ -17,7 +18,7 @@ jobs: with: python-version: "3.10" - - name: Semantic Link Labs Documentation + - name: Generate Documentation run: | pip install docstring_parser pip install . 
From 601be23f90083cb8f38fff6037d56c58a8783d46 Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 21 Nov 2024 13:07:47 +0200 Subject: [PATCH 16/18] added _config.yml --- .github/workflows/pages.yaml | 2 +- _config.yml | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 _config.yml diff --git a/.github/workflows/pages.yaml b/.github/workflows/pages.yaml index 85cf50b5..90faa3f8 100644 --- a/.github/workflows/pages.yaml +++ b/.github/workflows/pages.yaml @@ -1,4 +1,4 @@ -name: Semantic Link Labs Documentation +name: Semantic Link Labs Documentation Generator on: workflow_dispatch: {} push: diff --git a/_config.yml b/_config.yml new file mode 100644 index 00000000..cb481420 --- /dev/null +++ b/_config.yml @@ -0,0 +1,2 @@ +markdown: kramdown +theme: minima \ No newline at end of file From 6fa7aad597f0144dc939bc5c0f6278785d0e23a5 Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 21 Nov 2024 13:13:33 +0200 Subject: [PATCH 17/18] update --- .github/workflows/_generate_func_examples.py | 4 ++-- src/sempy_labs/tom/_model.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/_generate_func_examples.py b/.github/workflows/_generate_func_examples.py index 6e577417..acffbde6 100644 --- a/.github/workflows/_generate_func_examples.py +++ b/.github/workflows/_generate_func_examples.py @@ -71,8 +71,8 @@ def format_docstring_description(description): return re.sub(pattern_desc, r'[\1](\2)', str(description)) -markdown_example = '# Welcome to the Semantic Link Labs Function Examples!\n\n' -#markdown_example = '## Function Examples\n' +markdown_example = "---\ntitle: Semantic Link Labs\n---" +markdown_example += '\n# Welcome to the Semantic Link Labs Function Examples!\n\n' # Gather necessary ingredients into a dictionary func_dict = {} diff --git a/src/sempy_labs/tom/_model.py b/src/sempy_labs/tom/_model.py index 6468a670..fbf0eeee 100644 --- a/src/sempy_labs/tom/_model.py +++ b/src/sempy_labs/tom/_model.py @@ -2409,7 +2409,7 @@ 
def set_is_available_in_mdx( self.model.Tables[table_name].Columns[column_name].IsAvailableInMDX = value def set_summarize_by( - self, table_name: str, column_name: str, value: Optional[str] = 'Default' + self, table_name: str, column_name: str, value: Optional[str] = "Default" ): """ Sets the `SummarizeBy `_ property on a column. From 3d20485f46d8f0c65c472d29f65328286c375145 Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 21 Nov 2024 13:20:00 +0200 Subject: [PATCH 18/18] v3 --- .github/workflows/pages.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pages.yaml b/.github/workflows/pages.yaml index 90faa3f8..7bc9eb91 100644 --- a/.github/workflows/pages.yaml +++ b/.github/workflows/pages.yaml @@ -12,7 +12,7 @@ jobs: # At a minimum this job should upload artifacts using actions/upload-pages-artifact runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 with: