diff --git a/.github/workflows/build_cli.yml b/.github/workflows/build_cli.yml
new file mode 100644
index 0000000..d7d3ece
--- /dev/null
+++ b/.github/workflows/build_cli.yml
@@ -0,0 +1,30 @@
+name: Build CLI
+
+on: [push]
+
+jobs:
+  build:
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      max-parallel: 1
+      matrix:
+        os: ['windows-latest', 'ubuntu-latest', 'macos-latest']
+
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+        with:
+          python-version: "3.10"
+      - run: pip install .
+      - run: pip install pyinstaller
+      - run: pyinstaller -F ./dvuploader/cli.py -n dvuploader-${{ matrix.os }} --distpath ./bin
+      - name: Push
+        run: |
+          git config --global user.name "Build Bot"
+          git config --global user.email "build.bot@bot.com"
+
+          git pull
+          git add ./bin/\*
+          git commit -a -m "šŸ¤– Built DVUploader for ${{ matrix.os }}"
+          git push
diff --git a/README.md b/README.md
index 1b37c41..9ba5fa8 100644
--- a/README.md
+++ b/README.md
@@ -36,6 +36,8 @@ python3 -m pip install .
 
 ## Quickstart
 
+### Programmatic usage
+
 In order to perform a direct upload, you need to have a Dataverse instance running and a cloud storage provider. The following example shows how to upload files to a Dataverse instance. Simply provide the files of interest and utilize the `upload` method of a `DVUploader` instance.
 
 ```python
@@ -59,3 +61,55 @@ dvuploader.upload(
 )
 ```
 
+### Command Line Interface
+
+DVUploader ships with a command line interface that can be used outside of Python scripts. To upload files to a Dataverse instance, simply provide the files of interest, the persistent identifier of the target dataset, and your API token.
+
+#### Using arguments
+
+```bash
+dvuploader my_file.txt my_other_file.txt \
+    --pid doi:10.70122/XXX/XXXXX \
+    --api-token XXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX \
+    --dataverse-url https://demo.dataverse.org/
+```
+
+#### Using a config file
+
+Alternatively, you can supply a `config` file that contains all the necessary information for the uploader. The `config` file is a JSON/YAML file with the following keys:
+
+* `persistent_id`: Persistent identifier of the dataset to upload to.
+* `dataverse_url`: URL of the Dataverse instance.
+* `api_token`: API token of the Dataverse instance.
+* `files`: List of files to upload. Each file is a dictionary with the following keys:
+  * `filepath`: Path to the file to upload.
+  * `directoryLabel`: Optional directory label to upload the file to.
+  * `description`: Optional description of the file.
+  * `mimetype`: Mimetype of the file.
+  * `categories`: Optional list of categories to assign to the file.
+  * `restrict`: Boolean indicating whether the file is restricted. Defaults to `False`.
+
+In the following example, we upload three files to a Dataverse instance. The first file is uploaded to the root directory of the dataset, while the other two files are uploaded to the directory `some/dir`.
+
+```yaml
+# config.yml
+persistent_id: doi:10.70122/XXX/XXXXX
+dataverse_url: https://demo.dataverse.org/
+api_token: XXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
+files:
+  - filepath: ./small.txt
+  - filepath: ./medium.txt
+    directoryLabel: some/dir
+  - filepath: ./big.txt
+    directoryLabel: some/dir
+```
+
+The `config` file can then be used as follows:
+
+```bash
+dvuploader --config-path config.yml
+```
+
+#### CLI Binaries
+
+DVUploader ships with binaries for Linux, macOS, and Windows. You can download them from the `bin` [directory](./bin) and use them in the same way as described above.
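Editor's note: the config file described above maps directly onto the `CliInput` model introduced in `dvuploader/cli.py` later in this diff. As a minimal sketch of what the CLI does with such a file (assuming the `DVUploader` and `File` API shown elsewhere in this diff; this snippet is illustrative and not part of the PR), the programmatic equivalent of `dvuploader --config-path config.yml` is roughly:

```python
# Sketch: programmatic equivalent of `dvuploader --config-path config.yml`.
# Assumes the DVUploader/File interfaces shown in this diff; illustrative only.
import yaml

from dvuploader import DVUploader, File

with open("config.yml") as handle:
    # YAML is a superset of JSON, so a .json config parses the same way
    config = yaml.safe_load(handle)

# Each entry carries the keys listed in the README (filepath, directoryLabel, ...)
files = [File(**entry) for entry in config["files"]]

uploader = DVUploader(files=files)
uploader.upload(
    persistent_id=config["persistent_id"],
    dataverse_url=config["dataverse_url"],
    api_token=config["api_token"],
    n_jobs=-1,  # mirror the CLI default: use all available cores
)
```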
diff --git a/bin/dvuploader-macos-latest b/bin/dvuploader-macos-latest
new file mode 100755
index 0000000..7044a0a
Binary files /dev/null and b/bin/dvuploader-macos-latest differ
diff --git a/bin/dvuploader-ubuntu-latest b/bin/dvuploader-ubuntu-latest
new file mode 100755
index 0000000..99888c0
Binary files /dev/null and b/bin/dvuploader-ubuntu-latest differ
diff --git a/bin/dvuploader-windows-latest.exe b/bin/dvuploader-windows-latest.exe
new file mode 100644
index 0000000..45c41f0
Binary files /dev/null and b/bin/dvuploader-windows-latest.exe differ
diff --git a/dvuploader/cli.py b/dvuploader/cli.py
new file mode 100644
index 0000000..5429cea
--- /dev/null
+++ b/dvuploader/cli.py
@@ -0,0 +1,121 @@
+import yaml
+import typer
+
+from pydantic import BaseModel
+from typing import List
+from dvuploader import DVUploader, File
+
+
+class CliInput(BaseModel):
+    api_token: str
+    dataverse_url: str
+    persistent_id: str
+    files: List[File]
+
+
+app = typer.Typer()
+
+
+def _parse_yaml_config(path: str) -> CliInput:
+    """
+    Parses a JSON/YAML configuration file and returns a CliInput instance
+    containing a list of File objects, a persistent ID, a Dataverse URL,
+    and an API token.
+
+    Args:
+        path (str): Path to a JSON/YAML file containing specifications for the files to upload.
+
+    Returns:
+        CliInput: Class instance containing a list of File objects, a persistent ID,
+        a Dataverse URL, and an API token.
+
+    Raises:
+        ValueError: If the configuration file is invalid.
+    """
+    return CliInput(**yaml.safe_load(open(path)))
+
+
+def _validate_inputs(
+    filepaths: List[str],
+    pid: str,
+    dataverse_url: str,
+    api_token: str,
+    config_path: str,
+) -> None:
+    if config_path and filepaths:
+        raise typer.BadParameter(
+            "Cannot specify both a JSON/YAML file and a list of filepaths."
+        )
+
+    _has_meta_params = all(arg is not None for arg in [pid, dataverse_url, api_token])
+    _has_config_file = config_path is not None
+
+    if _has_meta_params and _has_config_file:
+        print(
+            "\nāš ļø Warning\n"
+            "ā”œā”€ā”€ You have specified both a configuration file and metadata parameters via the command line.\n"
+            "╰── The metadata parameters from the config file will be used."
+        )
+    elif not _has_meta_params and not _has_config_file:
+        raise typer.BadParameter(
+            "You must specify either a JSON/YAML file or the metadata parameters (--pid, --dataverse-url, --api-token) via the command line."
+        )
+
+
+@app.command()
+def main(
+    filepaths: List[str] = typer.Argument(
+        default=None,
+        help="A list of filepaths to upload.",
+    ),
+    pid: str = typer.Option(
+        default=None,
+        help="The persistent identifier of the Dataverse dataset.",
+    ),
+    api_token: str = typer.Option(
+        default=None,
+        help="The API token for the Dataverse repository.",
+    ),
+    dataverse_url: str = typer.Option(
+        default=None,
+        help="The URL of the Dataverse repository.",
+    ),
+    config_path: str = typer.Option(
+        default=None,
+        help="Path to a JSON/YAML file containing specifications for the files to upload. Defaults to None.",
+    ),
+    n_jobs: int = typer.Option(
+        default=-1,
+        help="The number of parallel jobs to run. Defaults to -1.",
+    ),
+):
+    _validate_inputs(
+        filepaths=filepaths,
+        pid=pid,
+        dataverse_url=dataverse_url,
+        api_token=api_token,
+        config_path=config_path,
+    )
+
+    if config_path:
+        # YAML is a superset of JSON, so the same parser handles both formats
+        cli_input = _parse_yaml_config(config_path)
+    else:
+        cli_input = CliInput(
+            api_token=api_token,
+            dataverse_url=dataverse_url,
+            persistent_id=pid,
+            files=[File(filepath=filepath) for filepath in filepaths],
+        )
+
+    uploader = DVUploader(files=cli_input.files)
+    uploader.upload(
+        persistent_id=cli_input.persistent_id,
+        dataverse_url=cli_input.dataverse_url,
+        api_token=cli_input.api_token,
+        n_jobs=n_jobs,
+    )
+
+
+if __name__ == "__main__":
+    app()
diff --git a/dvuploader/dvuploader.py b/dvuploader/dvuploader.py
index 26ad55d..158866c 100644
--- a/dvuploader/dvuploader.py
+++ b/dvuploader/dvuploader.py
@@ -5,7 +5,7 @@
 from typing import Dict, List
 from urllib.parse import urljoin
 
-from pydantic import BaseModel
+from pydantic import BaseModel, validator
 from joblib import Parallel, delayed
 from dotted_dict import DottedDict
 
@@ -43,7 +43,7 @@ def upload(
             dataverse_url (str): The URL of the Dataverse repository.
             api_token (str): The API token for the Dataverse repository.
             n_jobs (int): The number of parallel jobs to run. Defaults to -1.
-        
+
         Returns:
             None
         """
@@ -61,7 +61,7 @@ def upload(
         )
 
         if not self.files:
-            print("\nāŒ No files to upload")
+            print("\nāŒ No files to upload\n")
             return
 
         # Upload files in parallel
@@ -78,7 +78,7 @@ def upload(
             for position, file in enumerate(files)
         )
 
-        print("šŸŽ‰ Done!")
+        print("šŸŽ‰ Done!\n")
 
     def _check_duplicates(
         self,
@@ -104,9 +104,9 @@ def _check_duplicates(
         )
 
         print("\nšŸ”Ž Checking dataset files")
-        
+
         to_remove = []
-        
+
         for file in self.files:
             if any(map(lambda dsFile: self._check_hashes(file, dsFile), ds_files)):
                 print(
@@ -115,12 +115,12 @@ def _check_duplicates(
                 to_remove.append(file)
             else:
                 print(f"ā”œā”€ā”€ File '{file.fileName}' is new - Uploading.")
-        
+
         for file in to_remove:
             self.files.remove(file)
 
         print("šŸŽ‰ Done")
-    
+
     @staticmethod
     def _check_hashes(file: File, dsFile: Dict):
         """
@@ -133,7 +133,7 @@ def _check_hashes(file: File, dsFile: Dict):
         Returns:
             bool: True if the files have the same checksum, False otherwise.
         """
-        
+
         hash_algo, hash_value = tuple(dsFile.dataFile.checksum.values())
 
         return file.checksum.value == hash_value and file.checksum.type == hash_algo
diff --git a/dvuploader/file.py b/dvuploader/file.py
index 95de7c2..e4263fd 100644
--- a/dvuploader/file.py
+++ b/dvuploader/file.py
@@ -1,7 +1,7 @@
 import os
 from typing import List, Optional
 
-from pydantic import BaseModel, Field, validator
+from pydantic import BaseModel, Field, validator, ValidationError
 
 from dvuploader.checksum import Checksum, ChecksumTypes
 
@@ -21,12 +21,25 @@ class File(BaseModel):
     fileName: Optional[str] = None
     checksum: Optional[Checksum] = None
 
+    @staticmethod
+    def _validate_filepath(path):
+        if not os.path.exists(path):
+            raise FileNotFoundError(f"Filepath {path} does not exist.")
+        elif not os.path.isfile(path):
+            raise TypeError(f"Filepath {path} is not a file.")
+        elif not os.access(path, os.R_OK):
+            raise TypeError(f"Filepath {path} is not readable.")
+        elif os.path.getsize(path) == 0:
+            raise ValueError(f"Filepath {path} is empty.")
+        return path
+
     @validator("fileName", always=True)
     def _extract_filename(cls, v, values):
         return os.path.basename(values["filepath"])
 
     @validator("checksum", always=True)
     def _calculate_hash(cls, v, values):
+        cls._validate_filepath(values["filepath"])
         fpath = values["filepath"]
         hash_algo, hash_fun = values["checksum_type"].value
diff --git a/poetry.lock b/poetry.lock
index 2580e25..1f44b61 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -171,6 +171,21 @@ files = [
     {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"},
 ]
 
+[[package]]
+name = "click"
+version = "8.1.7"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+    {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
+
 [[package]]
 name = "colorama"
 version = "0.4.6"
@@ -371,6 +386,26 @@ files = [
     {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
 ]
 
+[[package]]
+name = "importlib-metadata"
+version = "6.7.0"
+description = "Read metadata from Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"},
+    {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+perf = ["ipython"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
+
 [[package]]
 name = "joblib"
 version = "1.3.2"
@@ -445,6 +480,65 @@ typing-extensions = ">=4.1.0"
 dotenv = ["python-dotenv (>=0.10.4)"]
 email = ["email-validator (>=1.0.3)"]
["email-validator (>=1.0.3)"] +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + [[package]] name = "requests" version = "2.31.0" @@ -502,6 +596,27 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "typer" +version = "0.9.0" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false +python-versions = ">=3.6" +files = [ + {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, + {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, +] + +[package.dependencies] +click = ">=7.1.1,<9.0.0" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] +doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] + [[package]] name = "typing-extensions" version = "4.7.1" @@ -530,6 +645,21 @@ secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17. 
 socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
 zstd = ["zstandard (>=0.18.0)"]
 
+[[package]]
+name = "zipp"
+version = "3.15.0"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"},
+    {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+
 [[package]]
 name = "zope-event"
 version = "5.0"
@@ -598,4 +728,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.7"
-content-hash = "7b0722a2f3c18e50fb0be44c8a1847c4a89e4270f1069c749da1eefd27a3f153"
+content-hash = "10cea9c3556a1b761c2f3223d201e9053ff75577a4bc8c86505d3745b510a7b8"
diff --git a/pyproject.toml b/pyproject.toml
index 46cfc18..fa76c91 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "dvuploader"
-version = "0.1.0"
+version = "0.2.0"
 description = "Python library for uploading (bulk) data to Dataverse"
 authors = ["Jan Range"]
 readme = "README.md"
@@ -13,7 +13,11 @@ tqdm = "^4.66.1"
 pydantic = "1.10"
 requests = "^2.31.0"
 dotted-dict = "^1.1.3"
+typer = "^0.9.0"
+pyyaml = "^6.0.1"
 
+[tool.poetry.scripts]
+dvuploader = "dvuploader.cli:app"
 
 [build-system]
 requires = ["poetry-core"]
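Editor's note: the `[tool.poetry.scripts]` entry above is what turns the Typer app into the `dvuploader` command used throughout the README. As a rough sketch (this is the generic shape of a generated console-script wrapper, not code contained in this PR), the installed `dvuploader` executable behaves approximately like:

```python
# Approximate equivalent of the console script generated for
# `dvuploader = "dvuploader.cli:app"` (illustrative only).
import sys

from dvuploader.cli import app

if __name__ == "__main__":
    sys.exit(app())
```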