From 5e4664fba45631cbb9c04ff03a97263d158e8a48 Mon Sep 17 00:00:00 2001 From: Steve Messick Date: Wed, 1 May 2024 20:06:03 +0000 Subject: [PATCH] Address review comments; add work-around for autogen.sh failure --- README.md | 19 +- docs/KaggleApi.md | 166 ++-- kaggle/api/kaggle_api.py | 310 +++---- kaggle/api/kaggle_api_extended.py | 506 ++++++----- kaggle/cli.py | 1013 ++++++++++++----------- kaggle/models/kaggle_models_extended.py | 17 - kaggle/test/test_authenticate.py | 1 - src/KaggleSwagger.yaml | 31 +- src/kaggle/api/kaggle_api_extended.py | 69 +- src/kaggle/cli.py | 30 +- tests/test_commands.sh | 4 +- 11 files changed, 1204 insertions(+), 962 deletions(-) diff --git a/README.md b/README.md index cfb9436..d6a06aa 100644 --- a/README.md +++ b/README.md @@ -16,15 +16,28 @@ pip install kaggle ## Development +### Kaggle Internal + +Obviously, this depends on Kaggle services. When you're extending the API and modifying +or adding to those services, you should be working in your Kaggle mid-tier development +environment. You'll run Kaggle locally, in the container, and test the Python code by +running it in the container so it can connect to your local testing environment. + +Also, run the following command to get `autogen.sh` installed: ```bash +rm -rf /tmp/autogen && mkdir -p /tmp/autogen && unzip -qo /tmp/autogen.zip -d /tmp/autogen && +mv /tmp/autogen/autogen-*/* /tmp/autogen && rm -rf /tmp/autogen/autogen-* && +sudo chmod a+rx /tmp/autogen/autogen.sh +``` + ### Prerequisites We use [hatch](https://hatch.pypa.io) to manage this project. Follow these [instructions](https://hatch.pypa.io/latest/install/) to install it. -If you are working in a managed environment, you may want to use `pipx`. If it isn't already installed -try `sudo apt install pipx`. Then you should be able to proceed with `pipx install hatch`. - +If you are working in a managed environment, you may want to use `pipx`. If it isn't already installed +try `sudo apt install pipx`. Then you should be able to proceed with `pipx install hatch`. 
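For example, a first-time setup in a fresh environment might look something like this (a minimal sketch, assuming a Debian/Ubuntu machine where `apt` is available):

```bash
# Install pipx if it is not already present, and make sure its bin directory is on PATH
sudo apt install pipx
pipx ensurepath

# Install hatch into an isolated environment and confirm it is available
pipx install hatch
hatch --version
```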
+ ### Dependencies ```sh diff --git a/docs/KaggleApi.md b/docs/KaggleApi.md index 5cbaa4e..36d523c 100644 --- a/docs/KaggleApi.md +++ b/docs/KaggleApi.md @@ -36,13 +36,13 @@ Method | HTTP request | Description [**kernels_list_files**](KaggleApi.md#kernels_list_files) | **GET** /kernels/files | List kernel files [**metadata_get**](KaggleApi.md#metadata_get) | **GET** /datasets/metadata/{ownerSlug}/{datasetSlug} | Get the metadata for a dataset [**metadata_post**](KaggleApi.md#metadata_post) | **POST** /datasets/metadata/{ownerSlug}/{datasetSlug} | Update the metadata for a dataset +[**model_instance_files**](KaggleApi.md#model_instance_files) | **GET** /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/files | List model instance files for the current version +[**model_instance_version_files**](KaggleApi.md#model_instance_version_files) | **GET** /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/files | List model instance version files [**model_instance_versions_download**](KaggleApi.md#model_instance_versions_download) | **GET** /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/download | Download model instance version files -[**model_instance_versions_files**](KaggleApi.md#model_instance_versions_files) | **GET** /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/files | List model instance version files [**models_create_instance**](KaggleApi.md#models_create_instance) | **POST** /models/{ownerSlug}/{modelSlug}/create/instance | Create a new model instance [**models_create_instance_version**](KaggleApi.md#models_create_instance_version) | **POST** /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/create/version | Create a new model instance version [**models_create_new**](KaggleApi.md#models_create_new) | **POST** /models/create/new | Create a new model [**models_list**](KaggleApi.md#models_list) | **GET** /models/list | Lists models -[**models_list_files**](KaggleApi.md#models_list_files) | **GET** /models/list/{ownerSlug}/{modelSlug} | List model files [**update_model**](KaggleApi.md#update_model) | **POST** /models/{ownerSlug}/{modelSlug}/update | Update a model [**update_model_instance**](KaggleApi.md#update_model_instance) | **POST** /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/update | Update a model [**upload_file**](KaggleApi.md#upload_file) | **POST** /blobs/upload | Start uploading a file @@ -1605,7 +1605,7 @@ configuration.password = 'YOUR_PASSWORD' # create an instance of the API class api_instance = kaggle.KaggleApi(kaggle.ApiClient(configuration)) page = 1 # int | Page number (optional) (default to 1) -page_size = 20 # int | Page size (optional) (default to 20) +page_size = 20 # int | Number of items per page (default 20) (optional) (default to 20) search = '' # str | Search terms (optional) (default to ) group = 'everyone' # str | Display only your kernels (optional) (default to everyone) user = 'user_example' # str | Display kernels by a particular group (optional) @@ -1630,7 +1630,7 @@ except ApiException as e: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **page** | **int**| Page number | [optional] [default to 1] - **page_size** | **int**| Page size | [optional] [default to 20] + **page_size** | **int**| Number of items per page (default 20) | [optional] [default to 20] **search** | **str**| Search terms | [optional] [default to ] **group** | **str**| Display only your kernels | [optional] [default to everyone] 
**user** | **str**| Display kernels by a particular group | [optional] @@ -1680,7 +1680,7 @@ api_instance = kaggle.KaggleApi(kaggle.ApiClient(configuration)) user_name = 'user_name_example' # str | Kernel owner kernel_slug = 'kernel_slug_example' # str | Kernel name kernel_version_number = 'kernel_version_number_example' # str | Kernel version number (optional) -page_size = 20 # int | Page size (optional) (default to 20) +page_size = 20 # int | Number of items per page (default 20) (optional) (default to 20) page_token = 'page_token_example' # str | Page token for pagination (optional) try: @@ -1698,7 +1698,7 @@ Name | Type | Description | Notes **user_name** | **str**| Kernel owner | **kernel_slug** | **str**| Kernel name | **kernel_version_number** | **str**| Kernel version number | [optional] - **page_size** | **int**| Page size | [optional] [default to 20] + **page_size** | **int**| Number of items per page (default 20) | [optional] [default to 20] **page_token** | **str**| Page token for pagination | [optional] ### Return type @@ -1824,10 +1824,10 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) -# **model_instance_versions_download** -> Result model_instance_versions_download(owner_slug, model_slug, framework, instance_slug, version_number) +# **model_instance_files** +> Result model_instance_files(owner_slug, model_slug, framework, instance_slug, page_size=page_size, page_token=page_token) -Download model instance version files +List model instance files for the current version ### Example ```python @@ -1848,14 +1848,15 @@ owner_slug = 'owner_slug_example' # str | Model owner model_slug = 'model_slug_example' # str | Model name framework = 'framework_example' # str | Model instance framework instance_slug = 'instance_slug_example' # str | Model instance slug -version_number = 'version_number_example' # str | Model instance version number +page_size = 20 # int | Number of items per page (default 20) (optional) (default to 20) +page_token = 'page_token_example' # str | Page token for pagination (optional) try: - # Download model instance version files - api_response = api_instance.model_instance_versions_download(owner_slug, model_slug, framework, instance_slug, version_number) + # List model instance files for the current version + api_response = api_instance.model_instance_files(owner_slug, model_slug, framework, instance_slug, page_size=page_size, page_token=page_token) pprint(api_response) except ApiException as e: - print("Exception when calling KaggleApi->model_instance_versions_download: %s\n" % e) + print("Exception when calling KaggleApi->model_instance_files: %s\n" % e) ``` ### Parameters @@ -1866,7 +1867,8 @@ Name | Type | Description | Notes **model_slug** | **str**| Model name | **framework** | **str**| Model instance framework | **instance_slug** | **str**| Model instance slug | - **version_number** | **str**| Model instance version number | + **page_size** | **int**| Number of items per page (default 20) | [optional] [default to 20] + **page_token** | **str**| Page token for pagination | [optional] ### Return type @@ -1879,12 +1881,12 @@ Name | Type | Description | Notes ### HTTP request headers - **Content-Type**: Not defined - - **Accept**: file + - **Accept**: application/json [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model 
list]](../README.md#documentation-for-models) [[Back to README]](../README.md) -# **model_instance_versions_files** -> Result model_instance_versions_files(owner_slug, model_slug, framework, instance_slug, version_number, page_size=page_size, page_token=page_token) +# **model_instance_version_files** +> Result model_instance_version_files(owner_slug, model_slug, framework, instance_slug, version_number, page_size=page_size, page_token=page_token) List model instance version files @@ -1908,15 +1910,15 @@ model_slug = 'model_slug_example' # str | Model name framework = 'framework_example' # str | Model instance framework instance_slug = 'instance_slug_example' # str | Model instance slug version_number = 'version_number_example' # str | Model instance version number -page_size = 20 # int | Page size (optional) (default to 20) +page_size = 20 # int | Number of items per page (default 20) (optional) (default to 20) page_token = 'page_token_example' # str | Page token for pagination (optional) try: # List model instance version files - api_response = api_instance.model_instance_versions_files(owner_slug, model_slug, framework, instance_slug, version_number, page_size=page_size, page_token=page_token) + api_response = api_instance.model_instance_version_files(owner_slug, model_slug, framework, instance_slug, version_number, page_size=page_size, page_token=page_token) pprint(api_response) except ApiException as e: - print("Exception when calling KaggleApi->model_instance_versions_files: %s\n" % e) + print("Exception when calling KaggleApi->model_instance_version_files: %s\n" % e) ``` ### Parameters @@ -1928,7 +1930,7 @@ Name | Type | Description | Notes **framework** | **str**| Model instance framework | **instance_slug** | **str**| Model instance slug | **version_number** | **str**| Model instance version number | - **page_size** | **int**| Page size | [optional] [default to 20] + **page_size** | **int**| Number of items per page (default 20) | [optional] [default to 20] **page_token** | **str**| Page token for pagination | [optional] ### Return type @@ -1946,6 +1948,65 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) +# **model_instance_versions_download** +> Result model_instance_versions_download(owner_slug, model_slug, framework, instance_slug, version_number) + +Download model instance version files + +### Example +```python +from __future__ import print_function +import time +import kaggle +from kaggle.rest import ApiException +from pprint import pprint + +# Configure HTTP basic authorization: basicAuth +configuration = kaggle.Configuration() +configuration.username = 'YOUR_USERNAME' +configuration.password = 'YOUR_PASSWORD' + +# create an instance of the API class +api_instance = kaggle.KaggleApi(kaggle.ApiClient(configuration)) +owner_slug = 'owner_slug_example' # str | Model owner +model_slug = 'model_slug_example' # str | Model name +framework = 'framework_example' # str | Model instance framework +instance_slug = 'instance_slug_example' # str | Model instance slug +version_number = 'version_number_example' # str | Model instance version number + +try: + # Download model instance version files + api_response = api_instance.model_instance_versions_download(owner_slug, model_slug, framework, instance_slug, version_number) + pprint(api_response) +except ApiException as e: + print("Exception when calling 
KaggleApi->model_instance_versions_download: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **owner_slug** | **str**| Model owner | + **model_slug** | **str**| Model name | + **framework** | **str**| Model instance framework | + **instance_slug** | **str**| Model instance slug | + **version_number** | **str**| Model instance version number | + +### Return type + +[**Result**](Result.md) + +### Authorization + +[basicAuth](../README.md#basicAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: file + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + # **models_create_instance** > Result models_create_instance(owner_slug, model_slug, model_new_instance_request) @@ -2134,7 +2195,7 @@ api_instance = kaggle.KaggleApi(kaggle.ApiClient(configuration)) search = '' # str | Search terms (optional) (default to ) sort_by = 'hotness' # str | Sort the results (optional) (default to hotness) owner = 'owner_example' # str | Display models by a specific user or organization (optional) -page_size = 20 # int | Page size (optional) (default to 20) +page_size = 20 # int | Number of items per page (default 20) (optional) (default to 20) page_token = 'page_token_example' # str | Page token for pagination (optional) try: @@ -2152,66 +2213,7 @@ Name | Type | Description | Notes **search** | **str**| Search terms | [optional] [default to ] **sort_by** | **str**| Sort the results | [optional] [default to hotness] **owner** | **str**| Display models by a specific user or organization | [optional] - **page_size** | **int**| Page size | [optional] [default to 20] - **page_token** | **str**| Page token for pagination | [optional] - -### Return type - -[**Result**](Result.md) - -### Authorization - -[basicAuth](../README.md#basicAuth) - -### HTTP request headers - - - **Content-Type**: Not defined - - **Accept**: application/json - -[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) - -# **models_list_files** -> Result models_list_files(owner_slug, model_slug, model_version_number=model_version_number, page_size=page_size, page_token=page_token) - -List model files - -### Example -```python -from __future__ import print_function -import time -import kaggle -from kaggle.rest import ApiException -from pprint import pprint - -# Configure HTTP basic authorization: basicAuth -configuration = kaggle.Configuration() -configuration.username = 'YOUR_USERNAME' -configuration.password = 'YOUR_PASSWORD' - -# create an instance of the API class -api_instance = kaggle.KaggleApi(kaggle.ApiClient(configuration)) -owner_slug = 'owner_slug_example' # str | Model owner -model_slug = 'model_slug_example' # str | Model name -model_version_number = 'model_version_number_example' # str | Model version number (optional) -page_size = 20 # int | Page size (optional) (default to 20) -page_token = 'page_token_example' # str | Page token for pagination (optional) - -try: - # List model files - api_response = api_instance.models_list_files(owner_slug, model_slug, model_version_number=model_version_number, page_size=page_size, page_token=page_token) - pprint(api_response) -except ApiException as e: - print("Exception when calling KaggleApi->models_list_files: %s\n" % e) 
-``` - -### Parameters - -Name | Type | Description | Notes -------------- | ------------- | ------------- | ------------- - **owner_slug** | **str**| Model owner | - **model_slug** | **str**| Model name | - **model_version_number** | **str**| Model version number | [optional] - **page_size** | **int**| Page size | [optional] [default to 20] + **page_size** | **int**| Number of items per page (default 20) | [optional] [default to 20] **page_token** | **str**| Page token for pagination | [optional] ### Return type diff --git a/kaggle/api/kaggle_api.py b/kaggle/api/kaggle_api.py index 15d6834..28a0ba3 100644 --- a/kaggle/api/kaggle_api.py +++ b/kaggle/api/kaggle_api.py @@ -3477,12 +3477,12 @@ def metadata_post_with_http_info(self, owner_slug, dataset_slug, settings, **kwa _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def model_instance_versions_download(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 - """Download model instance version files # noqa: E501 + def model_instance_files(self, owner_slug, model_slug, framework, instance_slug, **kwargs): # noqa: E501 + """List model instance files for the current version # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.model_instance_versions_download(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) + >>> thread = api.model_instance_files(owner_slug, model_slug, framework, instance_slug, async_req=True) >>> result = thread.get() :param async_req bool @@ -3490,24 +3490,25 @@ def model_instance_versions_download(self, owner_slug, model_slug, framework, in :param str model_slug: Model name (required) :param str framework: Model instance framework (required) :param str instance_slug: Model instance slug (required) - :param str version_number: Model instance version number (required) + :param int page_size: Number of items per page (default 20) + :param str page_token: Page token for pagination :return: Result If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.model_instance_versions_download_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 + return self.model_instance_files_with_http_info(owner_slug, model_slug, framework, instance_slug, **kwargs) # noqa: E501 else: - (data) = self.model_instance_versions_download_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 + (data) = self.model_instance_files_with_http_info(owner_slug, model_slug, framework, instance_slug, **kwargs) # noqa: E501 return data - def model_instance_versions_download_with_http_info(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 - """Download model instance version files # noqa: E501 + def model_instance_files_with_http_info(self, owner_slug, model_slug, framework, instance_slug, **kwargs): # noqa: E501 + """List model instance files for the current version # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.model_instance_versions_download_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) + >>> thread = api.model_instance_files_with_http_info(owner_slug, model_slug, framework, instance_slug, async_req=True) >>> result = thread.get() :param async_req bool @@ -3515,13 +3516,14 @@ def model_instance_versions_download_with_http_info(self, owner_slug, model_slug :param str model_slug: Model name (required) :param str framework: Model instance framework (required) :param str instance_slug: Model instance slug (required) - :param str version_number: Model instance version number (required) + :param int page_size: Number of items per page (default 20) + :param str page_token: Page token for pagination :return: Result If the method is called asynchronously, returns the request thread. """ - all_params = ['owner_slug', 'model_slug', 'framework', 'instance_slug', 'version_number'] # noqa: E501 + all_params = ['owner_slug', 'model_slug', 'framework', 'instance_slug', 'page_size', 'page_token'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -3532,30 +3534,26 @@ def model_instance_versions_download_with_http_info(self, owner_slug, model_slug if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method model_instance_versions_download" % key + " to method model_instance_files" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'owner_slug' is set if ('owner_slug' not in params or params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `model_instance_versions_download`") # noqa: E501 + raise ValueError("Missing the required parameter `owner_slug` when calling `model_instance_files`") # noqa: E501 # verify the required parameter 'model_slug' is set if ('model_slug' not in params or params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `model_instance_versions_download`") # noqa: E501 + raise ValueError("Missing the required parameter `model_slug` when calling `model_instance_files`") # noqa: E501 # verify the required parameter 'framework' is set if ('framework' not in params or params['framework'] is None): - raise ValueError("Missing the required parameter `framework` when calling `model_instance_versions_download`") # noqa: E501 + raise ValueError("Missing the required parameter `framework` when calling `model_instance_files`") # noqa: E501 # verify the required parameter 'instance_slug' is set if ('instance_slug' not in params or params['instance_slug'] is None): - raise ValueError("Missing the required parameter `instance_slug` when calling `model_instance_versions_download`") # noqa: E501 - # verify the required parameter 'version_number' is set - if ('version_number' not in params or - params['version_number'] is None): - raise ValueError("Missing the required parameter `version_number` when calling `model_instance_versions_download`") # noqa: E501 + raise ValueError("Missing the required parameter `instance_slug` when calling `model_instance_files`") # noqa: E501 collection_formats = {} @@ -3568,10 +3566,12 @@ def model_instance_versions_download_with_http_info(self, owner_slug, model_slug path_params['framework'] = params['framework'] # noqa: E501 if 'instance_slug' in params: path_params['instanceSlug'] = 
params['instance_slug'] # noqa: E501 - if 'version_number' in params: - path_params['versionNumber'] = params['version_number'] # noqa: E501 query_params = [] + if 'page_size' in params: + query_params.append(('pageSize', params['page_size'])) # noqa: E501 + if 'page_token' in params: + query_params.append(('pageToken', params['page_token'])) # noqa: E501 header_params = {} @@ -3581,13 +3581,13 @@ def model_instance_versions_download_with_http_info(self, owner_slug, model_slug body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( - ['file']) # noqa: E501 + ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['basicAuth'] # noqa: E501 return self.api_client.call_api( - '/models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/download', 'GET', + '/models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/files', 'GET', path_params, query_params, header_params, @@ -3602,12 +3602,12 @@ def model_instance_versions_download_with_http_info(self, owner_slug, model_slug _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def model_instance_versions_files(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 + def model_instance_version_files(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 """List model instance version files # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.model_instance_versions_files(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) + >>> thread = api.model_instance_version_files(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) >>> result = thread.get() :param async_req bool @@ -3624,17 +3624,17 @@ def model_instance_versions_files(self, owner_slug, model_slug, framework, insta """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.model_instance_versions_files_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 + return self.model_instance_version_files_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 else: - (data) = self.model_instance_versions_files_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 + (data) = self.model_instance_version_files_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 return data - def model_instance_versions_files_with_http_info(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 + def model_instance_version_files_with_http_info(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 """List model instance version files # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.model_instance_versions_files_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) + >>> thread = api.model_instance_version_files_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) >>> result = thread.get() :param async_req bool @@ -3661,30 +3661,30 @@ def model_instance_versions_files_with_http_info(self, owner_slug, model_slug, f if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method model_instance_versions_files" % key + " to method model_instance_version_files" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'owner_slug' is set if ('owner_slug' not in params or params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `model_instance_versions_files`") # noqa: E501 + raise ValueError("Missing the required parameter `owner_slug` when calling `model_instance_version_files`") # noqa: E501 # verify the required parameter 'model_slug' is set if ('model_slug' not in params or params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `model_instance_versions_files`") # noqa: E501 + raise ValueError("Missing the required parameter `model_slug` when calling `model_instance_version_files`") # noqa: E501 # verify the required parameter 'framework' is set if ('framework' not in params or params['framework'] is None): - raise ValueError("Missing the required parameter `framework` when calling `model_instance_versions_files`") # noqa: E501 + raise ValueError("Missing the required parameter `framework` when calling `model_instance_version_files`") # noqa: E501 # verify the required parameter 'instance_slug' is set if ('instance_slug' not in params or params['instance_slug'] is None): - raise ValueError("Missing the required parameter `instance_slug` when calling `model_instance_versions_files`") # noqa: E501 + raise ValueError("Missing the required parameter `instance_slug` when calling `model_instance_version_files`") # noqa: E501 # verify the required parameter 'version_number' is set if ('version_number' not in params or params['version_number'] is None): - raise ValueError("Missing the required parameter `version_number` when calling `model_instance_versions_files`") # noqa: E501 + raise ValueError("Missing the required parameter `version_number` when calling `model_instance_version_files`") # noqa: E501 collection_formats = {} @@ -3735,6 +3735,131 @@ def model_instance_versions_files_with_http_info(self, owner_slug, model_slug, f _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) + def model_instance_versions_download(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 + """Download model instance version files # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.model_instance_versions_download(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str owner_slug: Model owner (required) + :param str model_slug: Model name (required) + :param str framework: Model instance framework (required) + :param str instance_slug: Model instance slug (required) + :param str version_number: Model instance version number (required) + :return: Result + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.model_instance_versions_download_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 + else: + (data) = self.model_instance_versions_download_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 + return data + + def model_instance_versions_download_with_http_info(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 + """Download model instance version files # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.model_instance_versions_download_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str owner_slug: Model owner (required) + :param str model_slug: Model name (required) + :param str framework: Model instance framework (required) + :param str instance_slug: Model instance slug (required) + :param str version_number: Model instance version number (required) + :return: Result + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['owner_slug', 'model_slug', 'framework', 'instance_slug', 'version_number'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method model_instance_versions_download" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'owner_slug' is set + if ('owner_slug' not in params or + params['owner_slug'] is None): + raise ValueError("Missing the required parameter `owner_slug` when calling `model_instance_versions_download`") # noqa: E501 + # verify the required parameter 'model_slug' is set + if ('model_slug' not in params or + params['model_slug'] is None): + raise ValueError("Missing the required parameter `model_slug` when calling `model_instance_versions_download`") # noqa: E501 + # verify the required parameter 'framework' is set + if ('framework' not in params or + params['framework'] is None): + raise ValueError("Missing the required parameter `framework` when calling `model_instance_versions_download`") # noqa: E501 + # verify the required parameter 'instance_slug' is set + if ('instance_slug' not in params or + params['instance_slug'] is None): + raise ValueError("Missing the required parameter `instance_slug` when calling `model_instance_versions_download`") # noqa: E501 + # verify the required parameter 'version_number' is set + if ('version_number' not in params or + params['version_number'] is None): + raise ValueError("Missing the required parameter `version_number` when calling `model_instance_versions_download`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'owner_slug' in params: + path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 + if 'model_slug' in params: + path_params['modelSlug'] = params['model_slug'] # noqa: E501 + if 'framework' in params: + path_params['framework'] = params['framework'] # noqa: E501 + if 'instance_slug' in params: + path_params['instanceSlug'] = params['instance_slug'] # noqa: E501 + if 'version_number' in params: + path_params['versionNumber'] = params['version_number'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['file']) # noqa: E501 + + # Authentication setting + auth_settings = ['basicAuth'] # noqa: E501 + + return self.api_client.call_api( + '/models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/download', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='Result', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + def models_create_instance(self, owner_slug, model_slug, model_new_instance_request, **kwargs): # noqa: E501 """Create a new model instance # noqa: E501 @@ -4179,119 +4304,6 @@ def models_list_with_http_info(self, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def 
models_list_files(self, owner_slug, model_slug, **kwargs): # noqa: E501 - """List model files # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.models_list_files(owner_slug, model_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str model_version_number: Model version number - :param int page_size: Number of items per page (default 20) - :param str page_token: Page token for pagination - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.models_list_files_with_http_info(owner_slug, model_slug, **kwargs) # noqa: E501 - else: - (data) = self.models_list_files_with_http_info(owner_slug, model_slug, **kwargs) # noqa: E501 - return data - - def models_list_files_with_http_info(self, owner_slug, model_slug, **kwargs): # noqa: E501 - """List model files # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.models_list_files_with_http_info(owner_slug, model_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str model_version_number: Model version number - :param int page_size: Number of items per page (default 20) - :param str page_token: Page token for pagination - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['owner_slug', 'model_slug', 'model_version_number', 'page_size', 'page_token'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method models_list_files" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `models_list_files`") # noqa: E501 - # verify the required parameter 'model_slug' is set - if ('model_slug' not in params or - params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `models_list_files`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'model_slug' in params: - path_params['modelSlug'] = params['model_slug'] # noqa: E501 - - query_params = [] - if 'model_version_number' in params: - query_params.append(('modelVersionNumber', params['model_version_number'])) # noqa: E501 - if 'page_size' in params: - query_params.append(('pageSize', params['page_size'])) # noqa: E501 - if 'page_token' in params: - query_params.append(('pageToken', params['page_token'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # 
noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/models/list/{ownerSlug}/{modelSlug}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - def update_model(self, owner_slug, model_slug, model_update_request, **kwargs): # noqa: E501 """Update a model # noqa: E501 diff --git a/kaggle/api/kaggle_api_extended.py b/kaggle/api/kaggle_api_extended.py index 16f5787..47f8211 100644 --- a/kaggle/api/kaggle_api_extended.py +++ b/kaggle/api/kaggle_api_extended.py @@ -99,7 +99,6 @@ class DirectoryArchive(object): - def __init__(self, fullpath, format): self._fullpath = fullpath self._format = format @@ -109,8 +108,9 @@ def __init__(self, fullpath, format): def __enter__(self): self._temp_dir = tempfile.mkdtemp() _, dir_name = os.path.split(self._fullpath) - self.path = shutil.make_archive(os.path.join(self._temp_dir, dir_name), - self._format, self._fullpath) + self.path = shutil.make_archive( + os.path.join(self._temp_dir, dir_name), self._format, + self._fullpath) _, self.name = os.path.split(self.path) return self @@ -119,7 +119,6 @@ def __exit__(self, *args): class ResumableUploadContext(object): - def __init__(self, no_resume=False): self.no_resume = no_resume self._temp_dir = os.path.join(tempfile.gettempdir(), '.kaggle/uploads') @@ -365,7 +364,6 @@ def with_retry(self, initial_delay_millis=500, retry_multiplier=1.7, randomness_factor=0.5): - def retriable_func(*args): for i in range(1, max_retries + 1): try: @@ -375,9 +373,8 @@ def retriable_func(*args): total_delay = self._calculate_backoff_delay( i, initial_delay_millis, retry_multiplier, randomness_factor) - print( - 'Request failed: %s. Will retry in %2.1f seconds' % - (e, total_delay)) + print('Request failed: %s. Will retry in %2.1f seconds' + % (e, total_delay)) time.sleep(total_delay) continue raise @@ -405,8 +402,8 @@ def authenticate(self): or self.CONFIG_NAME_KEY not in config_data: if os.path.exists(self.config): config_data = self.read_config_file(config_data) - elif self._is_help_or_version_command(api_command) or (len( - sys.argv) > 2 and api_command.startswith( + elif self._is_help_or_version_command(api_command) or ( + len(sys.argv) > 2 and api_command.startswith( self.command_prefixes_allowing_anonymous_access)): # Some API commands should be allowed without authentication. return @@ -484,8 +481,8 @@ def _load_config(self, config_data): # Cert File if self.CONFIG_NAME_SSL_CA_CERT in config_data: - configuration.ssl_ca_cert = config_data[ - self.CONFIG_NAME_SSL_CA_CERT] + configuration.ssl_ca_cert = config_data[self. + CONFIG_NAME_SSL_CA_CERT] # Keep config values with class instance, and load api client! 
@@ -706,11 +703,12 @@ def competitions_list(self, str(self.valid_competition_sort_by)) competitions_list_result = self.process_response( - self.competitions_list_with_http_info(group=group or '', - category=category or '', - sort_by=sort_by or '', - page=page, - search=search or '')) + self.competitions_list_with_http_info( + group=group or '', + category=category or '', + sort_by=sort_by or '', + page=page, + search=search or '')) return [Competition(c) for c in competitions_list_result] def competitions_list_cli(self, @@ -731,11 +729,12 @@ def competitions_list_cli(self, search: a search term to use (default is empty string) csv_display: if True, print comma separated values """ - competitions = self.competitions_list(group=group, - category=category, - sort_by=sort_by, - page=page, - search=search) + competitions = self.competitions_list( + group=group, + category=category, + sort_by=sort_by, + page=page, + search=search) fields = [ 'ref', 'deadline', 'category', 'reward', 'teamCount', 'userHasEntered' @@ -788,9 +787,8 @@ def competition_submit(self, file_name, message, competition, quiet=False): upload_result_token = upload_result['token'] else: # New submissions path! - upload_status = self.upload_complete(file_name, - url_result['createUrl'], - quiet) + upload_status = self.upload_complete( + file_name, url_result['createUrl'], quiet) if upload_status != ResumableUploadResult.COMPLETE: # Actual error is printed during upload_complete. Not # ideal but changing would not be backwards compatible @@ -1165,9 +1163,8 @@ def dataset_list(self, ) if file_type and file_type not in self.valid_dataset_file_types: - raise ValueError( - 'Invalid file type specified. Valid options are ' + - str(self.valid_dataset_file_types)) + raise ValueError('Invalid file type specified. Valid options are ' + + str(self.valid_dataset_file_types)) if license_name and license_name not in self.valid_dataset_license_names: raise ValueError('Invalid license specified. 
Valid options are ' + @@ -1193,17 +1190,18 @@ def dataset_list(self, group = 'user' datasets_list_result = self.process_response( - self.datasets_list_with_http_info(group=group, - sort_by=sort_by or 'hottest', - size=size, - filetype=file_type or 'all', - license=license_name or 'all', - tagids=tag_ids or '', - search=search or '', - user=user or '', - page=page, - max_size=max_size, - min_size=min_size)) + self.datasets_list_with_http_info( + group=group, + sort_by=sort_by or 'hottest', + size=size, + filetype=file_type or 'all', + license=license_name or 'all', + tagids=tag_ids or '', + search=search or '', + user=user or '', + page=page, + max_size=max_size, + min_size=min_size)) return [Dataset(d) for d in datasets_list_result] def dataset_list_cli(self, @@ -1405,8 +1403,8 @@ def dataset_status(self, dataset): owner_slug = self.get_config_value(self.CONFIG_NAME_USER) dataset_slug = dataset dataset_status_result = self.process_response( - self.datasets_status_with_http_info(owner_slug=owner_slug, - dataset_slug=dataset_slug)) + self.datasets_status_with_http_info( + owner_slug=owner_slug, dataset_slug=dataset_slug)) return dataset_status_result def dataset_status_cli(self, dataset, dataset_opt=None): @@ -1588,19 +1586,21 @@ def dataset_download_cli(self, ] if file_name is None: - self.dataset_download_files(dataset, - path=path, - unzip=unzip, - force=force, - quiet=quiet, - licenses=licenses) + self.dataset_download_files( + dataset, + path=path, + unzip=unzip, + force=force, + quiet=quiet, + licenses=licenses) else: - self.dataset_download_file(dataset, - file_name, - path=path, - force=force, - quiet=quiet, - licenses=licenses) + self.dataset_download_file( + dataset, + file_name, + path=path, + force=force, + quiet=quiet, + licenses=licenses) def _upload_blob(self, path, quiet, blob_type, upload_context): """ upload a file @@ -1761,9 +1761,8 @@ def dataset_create_version_cli(self, ('The following are not valid tags and could not be added to ' 'the dataset: ') + str(result.invalidTags)) elif result.status.lower() == 'ok': - print( - 'Dataset version is being created. Please check progress at ' + - result.url) + print('Dataset version is being created. Please check progress at ' + + result.url) else: print('Dataset version creation error: ' + result.error) @@ -1830,8 +1829,8 @@ def dataset_create_new(self, dataset_slug = ref_list[1] # validations - if ref == self.config_values[ - self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE': + if ref == self.config_values[self. 
+ CONFIG_NAME_USER] + '/INSERT_SLUG_HERE': raise ValueError( 'Default slug detected, please change values before uploading') if title == 'INSERT_TITLE_HERE': @@ -1859,16 +1858,17 @@ def dataset_create_new(self, raise ValueError( 'Subtitle length must be between 20 and 80 characters') - request = DatasetNewRequest(title=title, - slug=dataset_slug, - owner_slug=owner_slug, - license_name=license_name, - subtitle=subtitle, - description=description, - files=[], - is_private=not public, - convert_to_csv=convert_to_csv, - category_ids=keywords) + request = DatasetNewRequest( + title=title, + slug=dataset_slug, + owner_slug=owner_slug, + license_name=license_name, + subtitle=subtitle, + description=description, + files=[], + is_private=not public, + convert_to_csv=convert_to_csv, + category_ids=keywords) with ResumableUploadContext() as upload_context: self.upload_files(request, resources, folder, ApiBlobType.DATASET, @@ -1963,28 +1963,30 @@ def download_file(self, headers={'Range': 'bytes=%d-' % (size_read, )}, _preload_content=False) - with tqdm(total=size, - initial=size_read, - unit='B', - unit_scale=True, - unit_divisor=1024, - disable=quiet) as pbar: + with tqdm( + total=size, + initial=size_read, + unit='B', + unit_scale=True, + unit_divisor=1024, + disable=quiet) as pbar: with open(outfile, open_mode) as out: while True: data = response.read(chunk_size) if not data: break out.write(data) - os.utime(outfile, - times=(remote_date_timestamp - 1, - remote_date_timestamp - 1)) + os.utime( + outfile, + times=(remote_date_timestamp - 1, + remote_date_timestamp - 1)) size_read = min(size, size_read + chunk_size) pbar.update(len(data)) if not quiet: print('\n', end='') - os.utime(outfile, - times=(remote_date_timestamp, remote_date_timestamp)) + os.utime( + outfile, times=(remote_date_timestamp, remote_date_timestamp)) def kernels_list(self, page=1, @@ -2055,18 +2057,19 @@ def kernels_list(self, group = 'profile' kernels_list_result = self.process_response( - self.kernels_list_with_http_info(page=page, - page_size=page_size, - group=group, - user=user or '', - language=language or 'all', - kernel_type=kernel_type or 'all', - output_type=output_type or 'all', - sort_by=sort_by or 'hotness', - dataset=dataset or '', - competition=competition or '', - parent_kernel=parent_kernel or '', - search=search or '')) + self.kernels_list_with_http_info( + page=page, + page_size=page_size, + group=group, + user=user or '', + language=language or 'all', + kernel_type=kernel_type or 'all', + output_type=output_type or 'all', + sort_by=sort_by or 'hotness', + dataset=dataset or '', + competition=competition or '', + parent_kernel=parent_kernel or '', + search=search or '')) return [Kernel(k) for k in kernels_list_result] def kernels_list_cli(self, @@ -2089,18 +2092,19 @@ def kernels_list_cli(self, ========== csv_display: if True, print comma separated values instead of table """ - kernels = self.kernels_list(page=page, - page_size=page_size, - search=search, - mine=mine, - dataset=dataset, - competition=competition, - parent_kernel=parent, - user=user, - language=language, - kernel_type=kernel_type, - output_type=output_type, - sort_by=sort_by) + kernels = self.kernels_list( + page=page, + page_size=page_size, + search=search, + mine=mine, + dataset=dataset, + competition=competition, + parent_kernel=parent, + user=user, + language=language, + kernel_type=kernel_type, + output_type=output_type, + sort_by=sort_by) fields = ['ref', 'title', 'author', 'lastRunTime', 'totalVotes'] if kernels: if csv_display: @@ 
-2125,10 +2129,11 @@ def kernels_list_files(self, kernel, page_token=None, page_size=20): kernel) kernels_list_files_result = self.process_response( - self.kernels_list_files_with_http_info(kernel_slug=kernel_slug, - user_name=user_name, - page_token=page_token, - page_size=page_size)) + self.kernels_list_files_with_http_info( + kernel_slug=kernel_slug, + user_name=user_name, + page_token=page_token, + page_size=page_size)) return FileList(kernels_list_files_result) def kernels_list_files_cli(self, @@ -2191,11 +2196,11 @@ def kernels_initialize(self, folder): 'code_file': 'INSERT_CODE_FILE_PATH_HERE', 'language': - 'Pick one of: {' + - ','.join(x for x in self.valid_push_language_types) + '}', + 'Pick one of: {' + ','.join( + x for x in self.valid_push_language_types) + '}', 'kernel_type': - 'Pick one of: {' + - ','.join(x for x in self.valid_push_kernel_types) + '}', + 'Pick one of: {' + ','.join( + x for x in self.valid_push_kernel_types) + '}', 'is_private': 'true', 'enable_gpu': @@ -2305,9 +2310,8 @@ def kernels_push(self, folder): for source in model_sources: self.validate_model_string(source) - docker_pinning_type = self.get_or_default(meta_data, - 'docker_image_pinning_type', - None) + docker_pinning_type = self.get_or_default( + meta_data, 'docker_image_pinning_type', None) if (docker_pinning_type is not None and docker_pinning_type not in self.valid_push_pinning_types): raise ValueError( @@ -2481,8 +2485,8 @@ def kernels_pull(self, kernel, path, metadata=False, quiet=True): if file_name is None: print( 'Unknown language %s + kernel type %s - please report this ' - 'on the kaggle-api github issues' % - (language, kernel_type)) + 'on the kaggle-api github issues' % (language, + kernel_type)) print( 'Saving as a python file, even though this may not be the ' 'correct language') @@ -2538,10 +2542,8 @@ def kernels_pull_cli(self, """ client wrapper for kernels_pull """ kernel = kernel or kernel_opt - effective_path = self.kernels_pull(kernel, - path=path, - metadata=metadata, - quiet=False) + effective_path = self.kernels_pull( + kernel, path=path, metadata=metadata, quiet=False) if metadata: print('Source code and metadata downloaded to ' + effective_path) else: @@ -2723,11 +2725,12 @@ def model_list(self, raise ValueError('Page size must be >= 1') models_list_result = self.process_response( - self.models_list_with_http_info(sort_by=sort_by or 'hotness', - search=search or '', - owner=owner or '', - page_size=page_size, - page_token=page_token)) + self.models_list_with_http_info( + sort_by=sort_by or 'hotness', + search=search or '', + owner=owner or '', + page_size=page_size, + page_token=page_token)) next_page_token = models_list_result['nextPageToken'] if next_page_token: @@ -2774,12 +2777,18 @@ def model_initialize(self, folder): raise ValueError('Invalid folder: ' + folder) meta_data = { - 'ownerSlug': 'INSERT_OWNER_SLUG_HERE', - 'title': 'INSERT_TITLE_HERE', - 'slug': 'INSERT_SLUG_HERE', - 'subtitle': '', - 'isPrivate': True, - 'description': '''# Model Summary + 'ownerSlug': + 'INSERT_OWNER_SLUG_HERE', + 'title': + 'INSERT_TITLE_HERE', + 'slug': + 'INSERT_SLUG_HERE', + 'subtitle': + '', + 'isPrivate': + True, + 'description': + '''# Model Summary # Model Characteristics @@ -2787,8 +2796,10 @@ def model_initialize(self, folder): # Evaluation Results ''', - 'publishTime': '', - 'provenanceSources': '' + 'publishTime': + '', + 'provenanceSources': + '' } meta_file = os.path.join(folder, self.MODEL_METADATA_FILE) with open(meta_file, 'w') as f: @@ -2842,14 +2853,15 @@ def 
model_create_new(self, folder): if publish_time: self.validate_date(publish_time) - request = ModelNewRequest(owner_slug=owner_slug, - slug=slug, - title=title, - subtitle=subtitle, - is_private=is_private, - description=description, - publish_time=publish_time, - provenance_sources=provenance_sources) + request = ModelNewRequest( + owner_slug=owner_slug, + slug=slug, + title=title, + subtitle=subtitle, + is_private=is_private, + description=description, + publish_time=publish_time, + provenance_sources=provenance_sources) result = ModelNewResponse( self.process_response( self.models_create_new_with_http_info(request))) @@ -2961,13 +2973,14 @@ def model_update(self, folder): if provenance_sources != None: update_mask['paths'].append('provenance_sources') - request = ModelUpdateRequest(title=title, - subtitle=subtitle, - is_private=is_private, - description=description, - publish_time=publish_time, - provenance_sources=provenance_sources, - update_mask=update_mask) + request = ModelUpdateRequest( + title=title, + subtitle=subtitle, + is_private=is_private, + description=description, + publish_time=publish_time, + provenance_sources=provenance_sources, + update_mask=update_mask) result = ModelNewResponse( self.process_response( self.update_model_with_http_info(owner_slug, slug, request))) @@ -3059,12 +3072,18 @@ def model_instance_initialize(self, folder): raise ValueError('Invalid folder: ' + folder) meta_data = { - 'ownerSlug': 'INSERT_OWNER_SLUG_HERE', - 'modelSlug': 'INSERT_EXISTING_MODEL_SLUG_HERE', - 'instanceSlug': 'INSERT_INSTANCE_SLUG_HERE', - 'framework': 'INSERT_FRAMEWORK_HERE', - 'overview': '', - 'usage': '''# Model Format + 'ownerSlug': + 'INSERT_OWNER_SLUG_HERE', + 'modelSlug': + 'INSERT_EXISTING_MODEL_SLUG_HERE', + 'instanceSlug': + 'INSERT_INSTANCE_SLUG_HERE', + 'framework': + 'INSERT_FRAMEWORK_HERE', + 'overview': + '', + 'usage': + '''# Model Format # Training Data @@ -3078,12 +3097,17 @@ def model_instance_initialize(self, folder): # Changelog ''', - 'licenseName': 'Apache 2.0', - 'fineTunable': False, + 'licenseName': + 'Apache 2.0', + 'fineTunable': + False, 'trainingData': [], - 'modelInstanceType': 'Unspecified', - 'baseModelInstanceId': 0, - 'externalBaseModelUrl': '' + 'modelInstanceType': + 'Unspecified', + 'baseModelInstanceId': + 0, + 'externalBaseModelUrl': + '' } meta_file = os.path.join(folder, self.MODEL_INSTANCE_METADATA_FILE) with open(meta_file, 'w') as f: @@ -3123,14 +3147,12 @@ def model_instance_create(self, folder, quiet=False, dir_mode='skip'): license_name = self.get_or_fail(meta_data, 'licenseName') fine_tunable = self.get_or_default(meta_data, 'fineTunable', False) training_data = self.get_or_default(meta_data, 'trainingData', []) - model_instance_type = self.get_or_default(meta_data, - 'modelInstanceType', - 'Unspecified') + model_instance_type = self.get_or_default( + meta_data, 'modelInstanceType', 'Unspecified') base_model_instance = self.get_or_default(meta_data, 'baseModelInstance', '') - external_base_model_url = self.get_or_default(meta_data, - 'externalBaseModelUrl', - '') + external_base_model_url = self.get_or_default( + meta_data, 'externalBaseModelUrl', '') # validations if owner_slug == 'INSERT_OWNER_SLUG_HERE': @@ -3175,9 +3197,8 @@ def model_instance_create(self, folder, quiet=False, dir_mode='skip'): result = ModelNewResponse( self.process_response( self.with_retry( - self.models_create_instance_with_http_info)(owner_slug, - model_slug, - request))) + self.models_create_instance_with_http_info)( + owner_slug, model_slug, request))) 
return result @@ -3237,6 +3258,73 @@ def model_instance_delete_cli(self, model_instance, yes): else: print('The model instance was deleted.') + def model_instance_files(self, + model_instance, + page_token=None, + page_size=20, + csv_display=False): + """ list all files for the current version of a model instance + + Parameters + ========== + model_instance: the string identifier of the model instance + should be in format [owner]/[model-name]/[framework]/[instance-slug] + page_token: token for pagination + page_size: the number of items per page + csv_display: if True, print comma separated values instead of table + """ + if model_instance is None: + raise ValueError('A model_instance must be specified') + + self.validate_model_instance_string(model_instance) + urls = model_instance.split('/') + [owner_slug, model_slug, framework, instance_slug] = urls + + response = self.process_response( + self.model_instance_files_with_http_info( + owner_slug=owner_slug, + model_slug=model_slug, + framework=framework, + instance_slug=instance_slug, + page_size=page_size, + page_token=page_token, + _preload_content=True)) + + if response: + next_page_token = response['nextPageToken'] + if next_page_token: + print('Next Page Token = {}'.format(next_page_token)) + return FileList(response) + else: + print('No files found') + + def model_instance_files_cli(self, + model_instance, + page_token=None, + page_size=20, + csv_display=False): + """ client wrapper for model_instance_files. + + Parameters + ========== + model_instance: the string identified of the model instance version + should be in format [owner]/[model-name]/[framework]/[instance-slug] + page_token: token for pagination + page_size: the number of items per page + csv_display: if True, print comma separated values instead of table + """ + result = self.model_instance_files( + model_instance, + page_token=page_token, + page_size=page_size, + csv_display=csv_display) + if result and result.files is not None: + fields = ['name', 'size', 'creationDate'] + if csv_display: + self.print_csv(result.files, fields) + else: + self.print_table(result.files, fields) + def model_instance_update(self, folder): """ update a model instance. Parameters @@ -3264,9 +3352,8 @@ def model_instance_update(self, folder): 'modelInstanceType', None) base_model_instance = self.get_or_default(meta_data, 'baseModelInstance', None) - external_base_model_url = self.get_or_default(meta_data, - 'externalBaseModelUrl', - None) + external_base_model_url = self.get_or_default( + meta_data, 'externalBaseModelUrl', None) # validations if owner_slug == 'INSERT_OWNER_SLUG_HERE': @@ -3365,8 +3452,8 @@ def model_instance_version_create(self, owner_slug, model_slug, framework, instance_slug = self.split_model_instance_string( model_instance) - request = ModelInstanceNewVersionRequest(version_notes=version_notes, - files=[]) + request = ModelInstanceNewVersionRequest( + version_notes=version_notes, files=[]) with ResumableUploadContext() as upload_context: self.upload_files(request, None, folder, ApiBlobType.MODEL, @@ -3396,9 +3483,8 @@ def model_instance_version_create_cli(self, quiet: suppress verbose output (default is False) dir_mode: what to do with directories: "skip" - ignore; "zip" - compress and upload """ - result = self.model_instance_version_create(model_instance, folder, - version_notes, quiet, - dir_mode) + result = self.model_instance_version_create( + model_instance, folder, version_notes, quiet, dir_mode) if result.hasId: print('Your model instance version was created. 
Url={}'.format( @@ -3490,11 +3576,12 @@ def model_instance_version_download_cli(self, quiet: suppress verbose output (default is False) untar: if True, untar files upon download (default is False) """ - return self.model_instance_version_download(model_instance_version, - path=path, - untar=untar, - force=force, - quiet=quiet) + return self.model_instance_version_download( + model_instance_version, + path=path, + untar=untar, + force=force, + quiet=quiet) def model_instance_version_files(self, model_instance_version, @@ -3520,7 +3607,7 @@ def model_instance_version_files(self, version_number] = urls response = self.process_response( - self.model_instance_versions_files_with_http_info( + self.model_instance_version_files_with_http_info( owner_slug=owner_slug, model_slug=model_slug, framework=framework, @@ -3553,10 +3640,11 @@ def model_instance_version_files_cli(self, page_size: the number of items per page csv_display: if True, print comma separated values instead of table """ - result = self.model_instance_version_files(model_instance_version, - page_token=page_token, - page_size=page_size, - csv_display=csv_display) + result = self.model_instance_version_files( + model_instance_version, + page_token=page_token, + page_size=page_size, + csv_display=csv_display) if result and result.files is not None: fields = ['name', 'size', 'creationDate'] if csv_display: @@ -3621,9 +3709,8 @@ def files_upload_cli(self, local_paths, inbox_path, no_resume, files_to_create = [] with ResumableUploadContext(no_resume) as upload_context: for local_path in local_paths: - (upload_file, - file_name) = self.file_upload_cli(local_path, inbox_path, - no_compress, upload_context) + (upload_file, file_name) = self.file_upload_cli( + local_path, inbox_path, no_compress, upload_context) if upload_file is None: continue @@ -3645,10 +3732,9 @@ def file_upload_cli(self, local_path, inbox_path, no_compress, file_or_folder_name = os.path.basename(full_path) dir_mode = 'tar' if no_compress else 'zip' - upload_file = self._upload_file_or_folder(parent_path, - file_or_folder_name, - ApiBlobType.INBOX, - upload_context, dir_mode) + upload_file = self._upload_file_or_folder( + parent_path, file_or_folder_name, ApiBlobType.INBOX, + upload_context, dir_mode) return (upload_file, file_or_folder_name) def print_obj(self, obj, indent=2): @@ -3698,8 +3784,8 @@ def print_table(self, items, fields): if len(items) == 0: return for f in fields: - length = max(len(f), - max([len(self.string(getattr(i, f))) for i in items])) + length = max( + len(f), max([len(self.string(getattr(i, f))) for i in items])) justify = '>' if isinstance(getattr( items[0], f), int) or f == 'size' or f == 'reward' else '<' formats.append('{:' + justify + self.string(length + 2) + '}') @@ -3851,10 +3937,9 @@ def upload_files(self, self.MODEL_INSTANCE_METADATA_FILE ]): continue - upload_file = self._upload_file_or_folder(folder, file_name, - blob_type, - upload_context, dir_mode, - quiet, resources) + upload_file = self._upload_file_or_folder( + folder, file_name, blob_type, upload_context, dir_mode, quiet, + resources) if upload_file is not None: request.files.append(upload_file) @@ -3936,9 +4021,9 @@ def process_column(self, column): ========== column: a list of values in a column to be processed """ - processed_column = DatasetColumn(name=self.get_or_fail(column, 'name'), - description=self.get_or_default( - column, 'description', '')) + processed_column = DatasetColumn( + name=self.get_or_fail(column, 'name'), + description=self.get_or_default(column, 
'description', '')) if 'type' in column: original_type = column['type'].lower() processed_column.original_type = original_type @@ -3982,11 +4067,12 @@ def upload_complete(self, path, url, quiet, resume=False): start_at = resumable_upload_result.start_at upload_size = file_size - start_at - with tqdm(total=upload_size, - unit='B', - unit_scale=True, - unit_divisor=1024, - disable=quiet) as progress_bar: + with tqdm( + total=upload_size, + unit='B', + unit_scale=True, + unit_divisor=1024, + disable=quiet) as progress_bar: with io.open(path, 'rb', buffering=0) as fp: session = requests.Session() if start_at > 0: @@ -3995,8 +4081,8 @@ def upload_complete(self, path, url, quiet, resume=False): 'Content-Length': '%d' % upload_size, 'Content-Range': - 'bytes %d-%d/%d' % - (start_at, file_size - 1, file_size) + 'bytes %d-%d/%d' % (start_at, file_size - 1, + file_size) }) reader = TqdmBufferedReader(fp, progress_bar) retries = Retry(total=10, backoff_factor=0.5) @@ -4320,7 +4406,6 @@ def confirmation(self): class TqdmBufferedReader(io.BufferedReader): - def __init__(self, raw, progress_bar): """ helper class to implement an io.BufferedReader Parameters @@ -4350,7 +4435,6 @@ def increment(self, length): class FileList(object): - def __init__(self, init_dict): self.error_message = '' files = init_dict['files'] diff --git a/kaggle/cli.py b/kaggle/cli.py index 7d4a6ae..a5fea6e 100644 --- a/kaggle/cli.py +++ b/kaggle/cli.py @@ -44,14 +44,14 @@ def main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter) - parser.add_argument('-v', - '--version', - action='version', - version='Kaggle API ' + KaggleApi.__version__) - - subparsers = parser.add_subparsers(title='commands', - help=Help.kaggle, - dest='command') + parser.add_argument( + '-v', + '--version', + action='version', + version='Kaggle API ' + KaggleApi.__version__) + + subparsers = parser.add_subparsers( + title='commands', help=Help.kaggle, dest='command') subparsers.required = True subparsers.choices = Help.kaggle_choices parse_competitions(subparsers) @@ -137,22 +137,25 @@ def parse_competitions(subparsers): dest='sort_by', required=False, help=Help.param_competition_sort_by) - parser_competitions_list_optional.add_argument('-p', - '--page', - dest='page', - default=1, - required=False, - help=Help.param_page) - parser_competitions_list_optional.add_argument('-s', - '--search', - dest='search', - required=False, - help=Help.param_search) - parser_competitions_list_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) + parser_competitions_list_optional.add_argument( + '-p', + '--page', + dest='page', + default=1, + required=False, + help=Help.param_page) + parser_competitions_list_optional.add_argument( + '-s', + '--search', + dest='search', + required=False, + help=Help.param_search) + parser_competitions_list_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) parser_competitions_list._action_groups.append( parser_competitions_list_optional) parser_competitions_list.set_defaults(func=api.competitions_list_cli) @@ -166,30 +169,35 @@ def parse_competitions(subparsers): ) parser_competitions_files_optional.add_argument( 'competition', nargs='?', default=None, help=Help.param_competition) - parser_competitions_files_optional.add_argument('-c', - '--competition', - dest='competition_opt', - required=False, - help=argparse.SUPPRESS) - parser_competitions_files_optional.add_argument('-v', - '--csv', - 
dest='csv_display', - action='store_true', - help=Help.param_csv) - parser_competitions_files_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) - parser_competitions_files_optional.add_argument('--page-token', - dest='page_token', - required=False, - help=Help.param_page_token) - parser_competitions_files_optional.add_argument('--page-size', - dest='page_size', - required=False, - default=20, - help=Help.param_page_size) + parser_competitions_files_optional.add_argument( + '-c', + '--competition', + dest='competition_opt', + required=False, + help=argparse.SUPPRESS) + parser_competitions_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_competitions_files_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) + parser_competitions_files_optional.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_competitions_files_optional.add_argument( + '--page-size', + dest='page_size', + required=False, + default=20, + help=Help.param_page_size) parser_competitions_files._action_groups.append( parser_competitions_files_optional) parser_competitions_files.set_defaults(func=api.competition_list_files_cli) @@ -203,11 +211,12 @@ def parse_competitions(subparsers): ) parser_competitions_download_optional.add_argument( 'competition', nargs='?', default=None, help=Help.param_competition) - parser_competitions_download_optional.add_argument('-c', - '--competition', - dest='competition_opt', - required=False, - help=argparse.SUPPRESS) + parser_competitions_download_optional.add_argument( + '-c', + '--competition', + dest='competition_opt', + required=False, + help=argparse.SUPPRESS) parser_competitions_download_optional.add_argument( '-f', '--file', @@ -220,23 +229,26 @@ def parse_competitions(subparsers): dest='path', required=False, help=Help.param_downfolder) - parser_competitions_download_optional.add_argument('-w', - '--wp', - dest='path', - action='store_const', - const='.', - required=False, - help=Help.param_wp) - parser_competitions_download_optional.add_argument('-o', - '--force', - dest='force', - action='store_true', - help=Help.param_force) - parser_competitions_download_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) + parser_competitions_download_optional.add_argument( + '-w', + '--wp', + dest='path', + action='store_const', + const='.', + required=False, + help=Help.param_wp) + parser_competitions_download_optional.add_argument( + '-o', + '--force', + dest='force', + action='store_true', + help=Help.param_force) + parser_competitions_download_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) parser_competitions_download._action_groups.append( parser_competitions_download_optional) parser_competitions_download.set_defaults( @@ -253,27 +265,30 @@ def parse_competitions(subparsers): 'required arguments') parser_competitions_submit_optional.add_argument( 'competition', nargs='?', default=None, help=Help.param_competition) - parser_competitions_submit_optional.add_argument('-c', - '--competition', - dest='competition_opt', - required=False, - help=argparse.SUPPRESS) - parser_competitions_submit_required.add_argument('-f', - '--file', - dest='file_name', - required=True, - help=Help.param_upfile) + parser_competitions_submit_optional.add_argument( + 
'-c', + '--competition', + dest='competition_opt', + required=False, + help=argparse.SUPPRESS) + parser_competitions_submit_required.add_argument( + '-f', + '--file', + dest='file_name', + required=True, + help=Help.param_upfile) parser_competitions_submit_required.add_argument( '-m', '--message', dest='message', required=True, help=Help.param_competition_message) - parser_competitions_submit_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) + parser_competitions_submit_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) parser_competitions_submit._action_groups.append( parser_competitions_submit_optional) parser_competitions_submit.set_defaults(func=api.competition_submit_cli) @@ -293,11 +308,12 @@ def parse_competitions(subparsers): dest='competition_opt', required=False, help=argparse.SUPPRESS) - parser_competitions_submissions_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) + parser_competitions_submissions_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) parser_competitions_submissions_optional.add_argument( '-q', '--quiet', @@ -338,11 +354,12 @@ def parse_competitions(subparsers): help=Help.param_competition_leaderboard_download) parser_competitions_leaderboard_optional.add_argument( '-p', '--path', dest='path', help=Help.param_downfolder) - parser_competitions_leaderboard_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) + parser_competitions_leaderboard_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) parser_competitions_leaderboard_optional.add_argument( '-q', '--quiet', @@ -367,8 +384,8 @@ def parse_datasets(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.group_datasets, aliases=['d']) - subparsers_datasets = parser_datasets.add_subparsers(title='commands', - dest='command') + subparsers_datasets = parser_datasets.add_subparsers( + title='commands', dest='command') subparsers_datasets.required = True subparsers_datasets.choices = Help.datasets_choices @@ -378,59 +395,58 @@ def parse_datasets(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_datasets_list) parser_datasets_list_optional = parser_datasets_list._action_groups.pop() - parser_datasets_list.add_argument('--sort-by', - dest='sort_by', - required=False, - help=Help.param_dataset_sort_by) - parser_datasets_list.add_argument('--size', - dest='size', - required=False, - help=Help.param_dataset_size) - parser_datasets_list.add_argument('--file-type', - dest='file_type', - required=False, - help=Help.param_dataset_file_type) - parser_datasets_list.add_argument('--license', - dest='license_name', - required=False, - help=Help.param_dataset_license) - parser_datasets_list.add_argument('--tags', - dest='tag_ids', - required=False, - help=Help.param_dataset_tags) - parser_datasets_list.add_argument('-s', - '--search', - dest='search', - required=False, - help=Help.param_search) - parser_datasets_list.add_argument('-m', - '--mine', - dest='mine', - action='store_true', - help=Help.param_mine) - parser_datasets_list.add_argument('--user', - dest='user', - required=False, - help=Help.param_dataset_user) - parser_datasets_list.add_argument('-p', - '--page', - dest='page', - default=1, - required=False, - help=Help.param_page) - 
parser_datasets_list.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) - parser_datasets_list.add_argument('--max-size', - dest='max_size', - required=False, - help=Help.param_dataset_maxsize) - parser_datasets_list.add_argument('--min-size', - dest='min_size', - required=False, - help=Help.param_dataset_minsize) + parser_datasets_list.add_argument( + '--sort-by', + dest='sort_by', + required=False, + help=Help.param_dataset_sort_by) + parser_datasets_list.add_argument( + '--size', dest='size', required=False, help=Help.param_dataset_size) + parser_datasets_list.add_argument( + '--file-type', + dest='file_type', + required=False, + help=Help.param_dataset_file_type) + parser_datasets_list.add_argument( + '--license', + dest='license_name', + required=False, + help=Help.param_dataset_license) + parser_datasets_list.add_argument( + '--tags', dest='tag_ids', required=False, help=Help.param_dataset_tags) + parser_datasets_list.add_argument( + '-s', + '--search', + dest='search', + required=False, + help=Help.param_search) + parser_datasets_list.add_argument( + '-m', '--mine', dest='mine', action='store_true', help=Help.param_mine) + parser_datasets_list.add_argument( + '--user', dest='user', required=False, help=Help.param_dataset_user) + parser_datasets_list.add_argument( + '-p', + '--page', + dest='page', + default=1, + required=False, + help=Help.param_page) + parser_datasets_list.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_datasets_list.add_argument( + '--max-size', + dest='max_size', + required=False, + help=Help.param_dataset_maxsize) + parser_datasets_list.add_argument( + '--min-size', + dest='min_size', + required=False, + help=Help.param_dataset_minsize) parser_datasets_list._action_groups.append(parser_datasets_list_optional) parser_datasets_list.set_defaults(func=api.dataset_list_cli) @@ -440,29 +456,31 @@ def parse_datasets(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_datasets_files) parser_datasets_files_optional = parser_datasets_files._action_groups.pop() - parser_datasets_files_optional.add_argument('dataset', - nargs='?', - default=None, - help=Help.param_dataset) - parser_datasets_files_optional.add_argument('-d', - '--dataset', - dest='dataset_opt', - required=False, - help=argparse.SUPPRESS) - parser_datasets_files_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) - parser_datasets_files_optional.add_argument('--page-token', - dest='page_token', - required=False, - help=Help.param_page_token) - parser_datasets_files_optional.add_argument('--page-size', - dest='page_size', - required=False, - default=20, - help=Help.param_page_size) + parser_datasets_files_optional.add_argument( + 'dataset', nargs='?', default=None, help=Help.param_dataset) + parser_datasets_files_optional.add_argument( + '-d', + '--dataset', + dest='dataset_opt', + required=False, + help=argparse.SUPPRESS) + parser_datasets_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_datasets_files_optional.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_datasets_files_optional.add_argument( + '--page-size', + dest='page_size', + required=False, + default=20, + help=Help.param_page_size) parser_datasets_files._action_groups.append(parser_datasets_files_optional) 
parser_datasets_files.set_defaults(func=api.dataset_list_files_cli) @@ -473,47 +491,48 @@ def parse_datasets(subparsers): help=Help.command_datasets_download) parser_datasets_download_optional = parser_datasets_download._action_groups.pop( ) - parser_datasets_download_optional.add_argument('dataset', - nargs='?', - default=None, - help=Help.param_dataset) - parser_datasets_download_optional.add_argument('-d', - '--dataset', - dest='dataset_opt', - required=False, - help=argparse.SUPPRESS) + parser_datasets_download_optional.add_argument( + 'dataset', nargs='?', default=None, help=Help.param_dataset) + parser_datasets_download_optional.add_argument( + '-d', + '--dataset', + dest='dataset_opt', + required=False, + help=argparse.SUPPRESS) parser_datasets_download_optional.add_argument( '-f', '--file', dest='file_name', required=False, help=Help.param_dataset_file) - parser_datasets_download_optional.add_argument('-p', - '--path', - dest='path', - required=False, - help=Help.param_downfolder) - parser_datasets_download_optional.add_argument('-w', - '--wp', - dest='path', - action='store_const', - const='.', - required=False, - help=Help.param_wp) - parser_datasets_download_optional.add_argument('--unzip', - dest='unzip', - action='store_true', - help=Help.param_unzip) - parser_datasets_download_optional.add_argument('-o', - '--force', - dest='force', - action='store_true', - help=Help.param_force) - parser_datasets_download_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) + parser_datasets_download_optional.add_argument( + '-p', + '--path', + dest='path', + required=False, + help=Help.param_downfolder) + parser_datasets_download_optional.add_argument( + '-w', + '--wp', + dest='path', + action='store_const', + const='.', + required=False, + help=Help.param_wp) + parser_datasets_download_optional.add_argument( + '--unzip', dest='unzip', action='store_true', help=Help.param_unzip) + parser_datasets_download_optional.add_argument( + '-o', + '--force', + dest='force', + action='store_true', + help=Help.param_force) + parser_datasets_download_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) parser_datasets_download._action_groups.append( parser_datasets_download_optional) parser_datasets_download.set_defaults(func=api.dataset_download_cli) @@ -531,21 +550,24 @@ def parse_datasets(subparsers): dest='folder', required=False, help=Help.param_dataset_upfile) - parser_datasets_create_optional.add_argument('-u', - '--public', - dest='public', - action='store_true', - help=Help.param_public) - parser_datasets_create_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) - parser_datasets_create_optional.add_argument('-t', - '--keep-tabular', - dest='convert_to_csv', - action='store_false', - help=Help.param_keep_tabular) + parser_datasets_create_optional.add_argument( + '-u', + '--public', + dest='public', + action='store_true', + help=Help.param_public) + parser_datasets_create_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) + parser_datasets_create_optional.add_argument( + '-t', + '--keep-tabular', + dest='convert_to_csv', + action='store_false', + help=Help.param_keep_tabular) parser_datasets_create_optional.add_argument( '-r', '--dir-mode', @@ -578,16 +600,18 @@ def parse_datasets(subparsers): dest='folder', required=False, help=Help.param_dataset_upfile) - 
parser_datasets_version_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) - parser_datasets_version_optional.add_argument('-t', - '--keep-tabular', - dest='convert_to_csv', - action='store_false', - help=Help.param_keep_tabular) + parser_datasets_version_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) + parser_datasets_version_optional.add_argument( + '-t', + '--keep-tabular', + dest='convert_to_csv', + action='store_false', + help=Help.param_keep_tabular) parser_datasets_version_optional.add_argument( '-r', '--dir-mode', @@ -611,11 +635,12 @@ def parse_datasets(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_datasets_init) parser_datasets_init_optional = parser_datasets_init._action_groups.pop() - parser_datasets_init_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_dataset_upfile) + parser_datasets_init_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_dataset_upfile) parser_datasets_init._action_groups.append(parser_datasets_init_optional) parser_datasets_init.set_defaults(func=api.dataset_initialize_cli) @@ -626,15 +651,14 @@ def parse_datasets(subparsers): help=Help.command_datasets_metadata) parser_datasets_metadata_optional = parser_datasets_metadata._action_groups.pop( ) - parser_datasets_metadata_optional.add_argument('dataset', - nargs='?', - default=None, - help=Help.param_dataset) - parser_datasets_metadata_optional.add_argument('-d', - '--dataset', - dest='dataset_opt', - required=False, - help=argparse.SUPPRESS) + parser_datasets_metadata_optional.add_argument( + 'dataset', nargs='?', default=None, help=Help.param_dataset) + parser_datasets_metadata_optional.add_argument( + '-d', + '--dataset', + dest='dataset_opt', + required=False, + help=argparse.SUPPRESS) parser_datasets_metadata_optional.add_argument( '--update', dest='update', @@ -653,15 +677,14 @@ def parse_datasets(subparsers): help=Help.command_datasets_status) parser_datasets_status_optional = parser_datasets_status._action_groups.pop( ) - parser_datasets_status_optional.add_argument('dataset', - nargs='?', - default=None, - help=Help.param_dataset) - parser_datasets_status_optional.add_argument('-d', - '--dataset', - dest='dataset_opt', - required=False, - help=argparse.SUPPRESS) + parser_datasets_status_optional.add_argument( + 'dataset', nargs='?', default=None, help=Help.param_dataset) + parser_datasets_status_optional.add_argument( + '-d', + '--dataset', + dest='dataset_opt', + required=False, + help=argparse.SUPPRESS) parser_datasets_status._action_groups.append( parser_datasets_status_optional) parser_datasets_status.set_defaults(func=api.dataset_status_cli) @@ -679,8 +702,8 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.group_kernels, aliases=['k']) - subparsers_kernels = parser_kernels.add_subparsers(title='commands', - dest='command') + subparsers_kernels = parser_kernels.add_subparsers( + title='commands', dest='command') subparsers_kernels.required = True subparsers_kernels.choices = Help.kernels_choices @@ -690,63 +713,57 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_list) parser_kernels_list_optional = parser_kernels_list._action_groups.pop() - parser_kernels_list_optional.add_argument('-m', - '--mine', - dest='mine', - action='store_true', - help=Help.param_mine) - 
parser_kernels_list_optional.add_argument('-p', - '--page', - dest='page', - default=1, - help=Help.param_page) - parser_kernels_list_optional.add_argument('--page-size', - dest='page_size', - default=20, - help=Help.param_page_size) - parser_kernels_list_optional.add_argument('-s', - '--search', - dest='search', - help=Help.param_search) - parser_kernels_list_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) - parser_kernels_list_optional.add_argument('--parent', - dest='parent', - required=False, - help=Help.param_kernel_parent) + parser_kernels_list_optional.add_argument( + '-m', '--mine', dest='mine', action='store_true', help=Help.param_mine) + parser_kernels_list_optional.add_argument( + '-p', '--page', dest='page', default=1, help=Help.param_page) + parser_kernels_list_optional.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) + parser_kernels_list_optional.add_argument( + '-s', '--search', dest='search', help=Help.param_search) + parser_kernels_list_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_kernels_list_optional.add_argument( + '--parent', + dest='parent', + required=False, + help=Help.param_kernel_parent) parser_kernels_list_optional.add_argument( '--competition', dest='competition', required=False, help=Help.param_kernel_competition) - parser_kernels_list_optional.add_argument('--dataset', - dest='dataset', - required=False, - help=Help.param_kernel_dataset) - parser_kernels_list_optional.add_argument('--user', - dest='user', - required=False, - help=Help.param_kernel_user) - parser_kernels_list_optional.add_argument('--language', - dest='language', - required=False, - help=Help.param_kernel_language) - parser_kernels_list_optional.add_argument('--kernel-type', - dest='kernel_type', - required=False, - help=Help.param_kernel_type) + parser_kernels_list_optional.add_argument( + '--dataset', + dest='dataset', + required=False, + help=Help.param_kernel_dataset) + parser_kernels_list_optional.add_argument( + '--user', dest='user', required=False, help=Help.param_kernel_user) + parser_kernels_list_optional.add_argument( + '--language', + dest='language', + required=False, + help=Help.param_kernel_language) + parser_kernels_list_optional.add_argument( + '--kernel-type', + dest='kernel_type', + required=False, + help=Help.param_kernel_type) parser_kernels_list_optional.add_argument( '--output-type', dest='output_type', required=False, help=Help.param_kernel_output_type) - parser_kernels_list_optional.add_argument('--sort-by', - dest='sort_by', - required=False, - help=Help.param_kernel_sort_by) + parser_kernels_list_optional.add_argument( + '--sort-by', + dest='sort_by', + required=False, + help=Help.param_kernel_sort_by) parser_kernels_list._action_groups.append(parser_kernels_list_optional) parser_kernels_list.set_defaults(func=api.kernels_list_cli) @@ -756,27 +773,24 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_files) parser_kernels_files_optional = parser_kernels_files._action_groups.pop() - parser_kernels_files_optional.add_argument('kernel', - nargs='?', - default=None, - help=Help.param_kernel) - parser_kernels_files_optional.add_argument('-k', - '--kernel', - dest='kernel_opt', - required=False, - help=argparse.SUPPRESS) - parser_kernels_files_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) 
- parser_kernels_files_optional.add_argument('--page-token', - dest='page_token', - help=Help.param_page_token) - parser_kernels_files_optional.add_argument('--page-size', - dest='page_size', - default=20, - help=Help.param_page_size) + parser_kernels_files_optional.add_argument( + 'kernel', nargs='?', default=None, help=Help.param_kernel) + parser_kernels_files_optional.add_argument( + '-k', + '--kernel', + dest='kernel_opt', + required=False, + help=argparse.SUPPRESS) + parser_kernels_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_kernels_files_optional.add_argument( + '--page-token', dest='page_token', help=Help.param_page_token) + parser_kernels_files_optional.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) parser_kernels_files._action_groups.append(parser_kernels_files_optional) parser_kernels_files.set_defaults(func=api.kernels_list_files_cli) @@ -786,11 +800,12 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_init) parser_kernels_init_optional = parser_kernels_init._action_groups.pop() - parser_kernels_init_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_kernel_upfile) + parser_kernels_init_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_kernel_upfile) parser_kernels_init._action_groups.append(parser_kernels_init_optional) parser_kernels_init.set_defaults(func=api.kernels_initialize_cli) @@ -800,11 +815,12 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_push) parser_kernels_push_optional = parser_kernels_push._action_groups.pop() - parser_kernels_push_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_kernel_upfile) + parser_kernels_push_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_kernel_upfile) parser_kernels_push._action_groups.append(parser_kernels_push_optional) parser_kernels_push.set_defaults(func=api.kernels_push_cli) @@ -814,27 +830,28 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_pull) parser_kernels_pull_optional = parser_kernels_pull._action_groups.pop() - parser_kernels_pull_optional.add_argument('kernel', - nargs='?', - default=None, - help=Help.param_kernel) - parser_kernels_pull_optional.add_argument('-k', - '--kernel', - dest='kernel', - required=False, - help=argparse.SUPPRESS) - parser_kernels_pull_optional.add_argument('-p', - '--path', - dest='path', - required=False, - help=Help.param_downfolder) - parser_kernels_pull_optional.add_argument('-w', - '--wp', - dest='path', - action='store_const', - const='.', - required=False, - help=Help.param_wp) + parser_kernels_pull_optional.add_argument( + 'kernel', nargs='?', default=None, help=Help.param_kernel) + parser_kernels_pull_optional.add_argument( + '-k', + '--kernel', + dest='kernel', + required=False, + help=argparse.SUPPRESS) + parser_kernels_pull_optional.add_argument( + '-p', + '--path', + dest='path', + required=False, + help=Help.param_downfolder) + parser_kernels_pull_optional.add_argument( + '-w', + '--wp', + dest='path', + action='store_const', + const='.', + required=False, + help=Help.param_wp) parser_kernels_pull_optional.add_argument( '-m', '--metadata', @@ -850,39 +867,42 @@ def parse_kernels(subparsers): 
formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_output) parser_kernels_output_optional = parser_kernels_output._action_groups.pop() - parser_kernels_output_optional.add_argument('kernel', - nargs='?', - default=None, - help=Help.param_kernel) - parser_kernels_output_optional.add_argument('-k', - '--kernel', - dest='kernel_opt', - required=False, - help=argparse.SUPPRESS) - parser_kernels_output_optional.add_argument('-p', - '--path', - dest='path', - required=False, - help=Help.param_downfolder) - parser_kernels_output_optional.add_argument('-w', - '--wp', - dest='path', - action='store_const', - const='.', - required=False, - help=Help.param_wp) - parser_kernels_output_optional.add_argument('-o', - '--force', - dest='force', - action='store_true', - required=False, - help=Help.param_force) - parser_kernels_output_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - required=False, - help=Help.param_quiet) + parser_kernels_output_optional.add_argument( + 'kernel', nargs='?', default=None, help=Help.param_kernel) + parser_kernels_output_optional.add_argument( + '-k', + '--kernel', + dest='kernel_opt', + required=False, + help=argparse.SUPPRESS) + parser_kernels_output_optional.add_argument( + '-p', + '--path', + dest='path', + required=False, + help=Help.param_downfolder) + parser_kernels_output_optional.add_argument( + '-w', + '--wp', + dest='path', + action='store_const', + const='.', + required=False, + help=Help.param_wp) + parser_kernels_output_optional.add_argument( + '-o', + '--force', + dest='force', + action='store_true', + required=False, + help=Help.param_force) + parser_kernels_output_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + required=False, + help=Help.param_quiet) parser_kernels_output._action_groups.append(parser_kernels_output_optional) parser_kernels_output.set_defaults(func=api.kernels_output_cli) @@ -892,15 +912,14 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_status) parser_kernels_status_optional = parser_kernels_status._action_groups.pop() - parser_kernels_status_optional.add_argument('kernel', - nargs='?', - default=None, - help=Help.param_kernel) - parser_kernels_status_optional.add_argument('-k', - '--kernel', - dest='kernel_opt', - required=False, - help=argparse.SUPPRESS) + parser_kernels_status_optional.add_argument( + 'kernel', nargs='?', default=None, help=Help.param_kernel) + parser_kernels_status_optional.add_argument( + '-k', + '--kernel', + dest='kernel_opt', + required=False, + help=argparse.SUPPRESS) parser_kernels_status._action_groups.append(parser_kernels_status_optional) parser_kernels_status.set_defaults(func=api.kernels_status_cli) @@ -912,8 +931,8 @@ def parse_models(subparsers): help=Help.group_models, aliases=['m']) - subparsers_models = parser_models.add_subparsers(title='commands', - dest='command') + subparsers_models = parser_models.add_subparsers( + title='commands', dest='command') subparsers_models.required = True subparsers_models.choices = Help.models_choices @@ -927,11 +946,12 @@ def parse_models(subparsers): help=Help.command_models_get) parser_models_get_optional = parser_models_get._action_groups.pop() parser_models_get_optional.add_argument('model', help=Help.param_model) - parser_models_get_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_model_downfile) + parser_models_get_optional.add_argument( + '-p', + '--path', + dest='folder', 
+ required=False, + help=Help.param_model_downfile) parser_models_get._action_groups.append(parser_models_get_optional) parser_models_get.set_defaults(func=api.model_get_cli) @@ -941,32 +961,32 @@ def parse_models(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_models_list) parser_models_list_optional = parser_models_list._action_groups.pop() - parser_models_list.add_argument('--sort-by', - dest='sort_by', - required=False, - help=Help.param_model_sort_by) - parser_models_list.add_argument('-s', - '--search', - dest='search', - required=False, - help=Help.param_search) - parser_models_list.add_argument('--owner', - dest='owner', - required=False, - help=Help.param_model_owner) - parser_models_list.add_argument('--page-size', - dest='page_size', - default=20, - help=Help.param_page_size) - parser_models_list.add_argument('--page-token', - dest='page_token', - required=False, - help=Help.param_page_token) - parser_models_list.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) + parser_models_list.add_argument( + '--sort-by', + dest='sort_by', + required=False, + help=Help.param_model_sort_by) + parser_models_list.add_argument( + '-s', + '--search', + dest='search', + required=False, + help=Help.param_search) + parser_models_list.add_argument( + '--owner', dest='owner', required=False, help=Help.param_model_owner) + parser_models_list.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) + parser_models_list.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_models_list.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) parser_models_list._action_groups.append(parser_models_list_optional) parser_models_list.set_defaults(func=api.model_list_cli) @@ -976,11 +996,12 @@ def parse_models(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_models_init) parser_models_init_optional = parser_models_init._action_groups.pop() - parser_models_init_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_model_upfile) + parser_models_init_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_model_upfile) parser_models_init._action_groups.append(parser_models_init_optional) parser_models_init.set_defaults(func=api.model_initialize_cli) @@ -990,11 +1011,12 @@ def parse_models(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_models_new) parser_models_create_optional = parser_models_create._action_groups.pop() - parser_models_create_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_model_upfile) + parser_models_create_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_model_upfile) parser_models_create._action_groups.append(parser_models_create_optional) parser_models_create.set_defaults(func=api.model_create_new_cli) @@ -1005,11 +1027,8 @@ def parse_models(subparsers): help=Help.command_models_delete) parser_models_delete_optional = parser_models_delete._action_groups.pop() parser_models_delete_optional.add_argument('model', help=Help.param_model) - parser_models_delete_optional.add_argument('-y', - '--yes', - dest='yes', - action='store_true', - help=Help.param_yes) + parser_models_delete_optional.add_argument( + '-y', '--yes', dest='yes', action='store_true', 
help=Help.param_yes) parser_models_delete._action_groups.append(parser_models_delete_optional) parser_models_delete.set_defaults(func=api.model_delete_cli) @@ -1019,11 +1038,12 @@ def parse_models(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_models_update) parser_models_update_optional = parser_models_update._action_groups.pop() - parser_models_update_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_model_upfile) + parser_models_update_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_model_upfile) parser_models_update._action_groups.append(parser_models_update_optional) parser_models_update.set_defaults(func=api.model_update_cli) @@ -1093,11 +1113,12 @@ def parse_model_instances(subparsers): dest='folder', required=False, help=Help.param_model_instance_upfile) - parser_model_instances_create_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) + parser_model_instances_create_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) parser_model_instances_create_optional.add_argument( '-r', '--dir-mode', @@ -1110,6 +1131,33 @@ def parse_model_instances(subparsers): parser_model_instances_create.set_defaults( func=api.model_instance_create_cli) + # Model Instances files + parser_model_instances_files = subparsers_model_instances.add_parser( + 'files', + formatter_class=argparse.RawTextHelpFormatter, + help=Help.command_model_instances_files) + parser_model_instances_files_optional = parser_model_instances_files._action_groups.pop( + ) + parser_model_instances_files_optional.add_argument( + 'model_instance', help=Help.param_model_instance) + parser_model_instances_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_model_instances_files_optional.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) + parser_model_instances_files_optional.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_model_instances_files._action_groups.append( + parser_model_instances_files_optional) + parser_model_instances_files.set_defaults( + func=api.model_instance_files_cli) + # Models Instances delete parser_model_instances_delete = subparsers_model_instances.add_parser( 'delete', @@ -1119,11 +1167,8 @@ def parse_model_instances(subparsers): ) parser_model_instances_delete_optional.add_argument( 'model_instance', help=Help.param_model_instance) - parser_model_instances_delete_optional.add_argument('-y', - '--yes', - dest='yes', - action='store_true', - help=Help.param_yes) + parser_model_instances_delete_optional.add_argument( + '-y', '--yes', dest='yes', action='store_true', help=Help.param_yes) parser_model_instances_delete._action_groups.append( parser_model_instances_delete_optional) parser_model_instances_delete.set_defaults( @@ -1284,8 +1329,8 @@ def parse_files(subparsers): help=Help.group_files, aliases=['f']) - subparsers_files = parser_files.add_subparsers(title='commands', - dest='command') + subparsers_files = parser_files.add_subparsers( + title='commands', dest='command') subparsers_files.required = True subparsers_files.choices = Help.files_choices @@ -1331,8 +1376,8 @@ def parse_config(subparsers): 'config', formatter_class=argparse.RawTextHelpFormatter, help=Help.group_config) - subparsers_config = 
parser_config.add_subparsers(title='commands', - dest='command') + subparsers_config = parser_config.add_subparsers( + title='commands', dest='command') subparsers_config.required = True subparsers_config.choices = Help.config_choices @@ -1349,16 +1394,18 @@ def parse_config(subparsers): parser_config_set._action_groups.pop() parser_config_set_required = parser_config_set.add_argument_group( 'required arguments') - parser_config_set_required.add_argument('-n', - '--name', - dest='name', - required=True, - help=Help.param_config_name) - parser_config_set_required.add_argument('-v', - '--value', - dest='value', - required=True, - help=Help.param_config_value) + parser_config_set_required.add_argument( + '-n', + '--name', + dest='name', + required=True, + help=Help.param_config_name) + parser_config_set_required.add_argument( + '-v', + '--value', + dest='value', + required=True, + help=Help.param_config_value) parser_config_set.set_defaults(func=api.set_config_value) parser_config_unset = subparsers_config.add_parser( @@ -1368,11 +1415,12 @@ def parse_config(subparsers): parser_config_unset._action_groups.pop() parser_config_unset_required = parser_config_unset.add_argument_group( 'required arguments') - parser_config_unset_required.add_argument('-n', - '--name', - dest='name', - required=True, - help=Help.param_config_name) + parser_config_unset_required.add_argument( + '-n', + '--name', + dest='name', + required=True, + help=Help.param_config_name) parser_config_unset.set_defaults(func=api.unset_config_value) @@ -1395,7 +1443,7 @@ class Help(object): 'instances', 'get', 'list', 'init', 'create', 'delete', 'update' ] model_instances_choices = [ - 'versions', 'get', 'init', 'create', 'delete', 'update' + 'versions', 'get', 'files', 'init', 'create', 'delete', 'update' ] model_instance_versions_choices = [ 'init', 'create', 'download', 'delete', 'files' @@ -1635,6 +1683,7 @@ class Help(object): ) command_model_instances_get = 'Get a model instance' command_model_instances_init = 'Initialize metadata file for model instance creation' + command_model_instances_files = 'List files for the current version of a model instance' command_model_instances_new = 'Create a new model instance' param_model_instance_downfile = ( 'Folder for downloading the special model-instance-metadata.json file ' diff --git a/kaggle/models/kaggle_models_extended.py b/kaggle/models/kaggle_models_extended.py index 3f11b6d..f285f55 100644 --- a/kaggle/models/kaggle_models_extended.py +++ b/kaggle/models/kaggle_models_extended.py @@ -37,7 +37,6 @@ class Competition(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -48,7 +47,6 @@ def __repr__(self): class SubmitResult(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -58,7 +56,6 @@ def __repr__(self): class Submission(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -72,7 +69,6 @@ def __repr__(self): class LeaderboardEntry(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -82,7 +78,6 @@ def __repr__(self): class Dataset(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -96,7 +91,6 @@ def __repr__(self): class Model(object): - def __init__(self, 
init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -106,7 +100,6 @@ def __repr__(self): class Metadata(object): - def __init__(self, init_info): parsed_info = {k: parse(v) for k, v in init_info.items()} # backwards compatibility @@ -119,7 +112,6 @@ def __repr__(self): class DatasetVersion(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -129,7 +121,6 @@ def __repr__(self): class File(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -149,7 +140,6 @@ def get_size(size, precision=0): class Tag(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -159,7 +149,6 @@ def __repr__(self): class DatasetNewVersionResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -169,7 +158,6 @@ def __repr__(self): class DatasetNewResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -179,7 +167,6 @@ def __repr__(self): class ListFilesResult(object): - def __init__(self, init_dict): self.error_message = init_dict['errorMessage'] files = init_dict['datasetFiles'] @@ -198,7 +185,6 @@ def __repr__(self): class Kernel: - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -208,7 +194,6 @@ def __repr__(self): class KernelPushResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -218,7 +203,6 @@ def __repr__(self): class ModelNewResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -228,7 +212,6 @@ def __repr__(self): class ModelDeleteResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) diff --git a/kaggle/test/test_authenticate.py b/kaggle/test/test_authenticate.py index af8e92f..06d8787 100644 --- a/kaggle/test/test_authenticate.py +++ b/kaggle/test/test_authenticate.py @@ -23,7 +23,6 @@ class TestAuthenticate(unittest.TestCase): - def setUp(self): print("setup class:%s" % self) diff --git a/src/KaggleSwagger.yaml b/src/KaggleSwagger.yaml index 348d523..a9ceefb 100644 --- a/src/KaggleSwagger.yaml +++ b/src/KaggleSwagger.yaml @@ -703,7 +703,7 @@ paths: name: pageSize type: integer default: 20 - description: Page size + description: Number of items per page (default 20) - in: query name: search default: "" @@ -803,7 +803,7 @@ paths: name: pageSize type: integer default: 20 - description: Page size + description: Number of items per page (default 20) - in: query name: pageToken type: string @@ -1056,7 +1056,7 @@ paths: name: pageSize type: integer default: 20 - description: Page size + description: Number of items per page (default 20) - in: query name: pageToken type: string @@ -1070,12 +1070,12 @@ paths: description: Error schema: $ref: "#/definitions/Error" - /models/list/{ownerSlug}/{modelSlug}: + /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/files: get: tags: - kaggle - summary: List model files - operationId: ModelsListFiles + summary: List model instance files for the 
current version + operationId: ModelInstanceFiles produces: - application/json parameters: @@ -1089,16 +1089,21 @@ paths: required: true type: string description: Model name - - in: query - name: modelVersionNumber - required: false + - in: path + name: framework + required: true type: string - description: Model version number + description: Model instance framework + - in: path + name: instanceSlug + required: true + type: string + description: Model instance slug - in: query name: pageSize type: integer default: 20 - description: Page size + description: Number of items per page (default 20) - in: query name: pageToken type: string @@ -1505,7 +1510,7 @@ paths: tags: - kaggle summary: List model instance version files - operationId: ModelInstanceVersionsFiles + operationId: ModelInstanceVersionFiles produces: - application/json parameters: @@ -1538,7 +1543,7 @@ paths: name: pageSize type: integer default: 20 - description: Page size + description: Number of items per page (default 20) - in: query name: pageToken type: string diff --git a/src/kaggle/api/kaggle_api_extended.py b/src/kaggle/api/kaggle_api_extended.py index 654a004..e7fd172 100644 --- a/src/kaggle/api/kaggle_api_extended.py +++ b/src/kaggle/api/kaggle_api_extended.py @@ -3242,6 +3242,73 @@ def model_instance_delete_cli(self, model_instance, yes): else: print('The model instance was deleted.') + def model_instance_files(self, + model_instance, + page_token=None, + page_size=20, + csv_display=False): + """ list all files for the current version of a model instance + + Parameters + ========== + model_instance: the string identifier of the model instance + should be in format [owner]/[model-name]/[framework]/[instance-slug] + page_token: token for pagination + page_size: the number of items per page + csv_display: if True, print comma separated values instead of table + """ + if model_instance is None: + raise ValueError('A model_instance must be specified') + + self.validate_model_instance_string(model_instance) + urls = model_instance.split('/') + [owner_slug, model_slug, framework, instance_slug] = urls + + response = self.process_response( + self.model_instance_files_with_http_info( + owner_slug=owner_slug, + model_slug=model_slug, + framework=framework, + instance_slug=instance_slug, + page_size=page_size, + page_token=page_token, + _preload_content=True)) + + if response: + next_page_token = response['nextPageToken'] + if next_page_token: + print('Next Page Token = {}'.format(next_page_token)) + return FileList(response) + else: + print('No files found') + + def model_instance_files_cli(self, + model_instance, + page_token=None, + page_size=20, + csv_display=False): + """ client wrapper for model_instance_files. + + Parameters + ========== + model_instance: the string identified of the model instance version + should be in format [owner]/[model-name]/[framework]/[instance-slug] + page_token: token for pagination + page_size: the number of items per page + csv_display: if True, print comma separated values instead of table + """ + result = self.model_instance_files( + model_instance, + page_token=page_token, + page_size=page_size, + csv_display=csv_display) + if result and result.files is not None: + fields = ['name', 'size', 'creationDate'] + if csv_display: + self.print_csv(result.files, fields) + else: + self.print_table(result.files, fields) + def model_instance_update(self, folder): """ update a model instance. 
Parameters @@ -3524,7 +3591,7 @@ def model_instance_version_files(self, version_number] = urls response = self.process_response( - self.model_instance_versions_files_with_http_info( + self.model_instance_version_files_with_http_info( owner_slug=owner_slug, model_slug=model_slug, framework=framework, diff --git a/src/kaggle/cli.py b/src/kaggle/cli.py index 83c3fec..0fe5e65 100644 --- a/src/kaggle/cli.py +++ b/src/kaggle/cli.py @@ -1115,6 +1115,33 @@ def parse_model_instances(subparsers): parser_model_instances_create.set_defaults( func=api.model_instance_create_cli) + # Model Instances files + parser_model_instances_files = subparsers_model_instances.add_parser( + 'files', + formatter_class=argparse.RawTextHelpFormatter, + help=Help.command_model_instances_files) + parser_model_instances_files_optional = parser_model_instances_files._action_groups.pop( + ) + parser_model_instances_files_optional.add_argument( + 'model_instance', help=Help.param_model_instance) + parser_model_instances_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_model_instances_files_optional.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) + parser_model_instances_files_optional.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_model_instances_files._action_groups.append( + parser_model_instances_files_optional) + parser_model_instances_files.set_defaults( + func=api.model_instance_files_cli) + # Models Instances delete parser_model_instances_delete = subparsers_model_instances.add_parser( 'delete', @@ -1400,7 +1427,7 @@ class Help(object): 'instances', 'get', 'list', 'init', 'create', 'delete', 'update' ] model_instances_choices = [ - 'versions', 'get', 'init', 'create', 'delete', 'update' + 'versions', 'get', 'files', 'init', 'create', 'delete', 'update' ] model_instance_versions_choices = [ 'init', 'create', 'download', 'delete', 'files' @@ -1640,6 +1667,7 @@ class Help(object): ) command_model_instances_get = 'Get a model instance' command_model_instances_init = 'Initialize metadata file for model instance creation' + command_model_instances_files = 'List files for the current version of a model instance' command_model_instances_new = 'Create a new model instance' param_model_instance_downfile = ( 'Folder for downloading the special model-instance-metadata.json file ' diff --git a/tests/test_commands.sh b/tests/test_commands.sh index 8d050e1..0c2493e 100755 --- a/tests/test_commands.sh +++ b/tests/test_commands.sh @@ -7,6 +7,6 @@ # Pagination for listing of competitions, datasets, and kernels is out-of-scope for current work. kaggle competitions files titanic --page-size=3 --page-token=abcd -kaggle kernels files hermengardo/ps4e4-ensemble-eda --page-size=5 # valid page token required -kaggle datasets files nelgiriyewithana/apple-quality --page-size=7 --page-token=abcd +kaggle kernels files kerneler/sqlite-global-default --page-size=1 # valid page token required +kaggle datasets files kerneler/brazilian-bird-observation-metadata-from-wikiaves --page-size=7 --page-token=abcd kaggle models instances versions files google/gemma/pytorch/7b/2 --page-size=3 --page-token=abcd
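
Below is a minimal sketch of how the new `model_instance_files` API introduced in this patch can be exercised from Python, for anyone verifying the change locally. It assumes API credentials are already configured in `~/.kaggle/kaggle.json`, and it uses `google/gemma/pytorch/7b` purely as an illustrative `[owner]/[model-name]/[framework]/[instance-slug]` handle (any accessible model instance works); the printing call mirrors the fields used by the new CLI wrapper.

```python
# Sketch only: list files for the current version of a model instance via the
# model_instance_files method added in this patch. Assumes kaggle.json
# credentials are configured; the instance handle is just an example.
from kaggle.api.kaggle_api_extended import KaggleApi

api = KaggleApi()
api.authenticate()

# Returns a FileList, or None after printing 'No files found' for an empty instance.
result = api.model_instance_files('google/gemma/pytorch/7b', page_size=5)
if result and result.files is not None:
    # Same fields the new model_instance_files_cli wrapper prints.
    api.print_table(result.files, ['name', 'size', 'creationDate'])
```

The CLI equivalent would be `kaggle models instances files google/gemma/pytorch/7b --page-size=5`, alongside the existing `models instances versions files` check in `tests/test_commands.sh`.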