"""Access datasets."""

import filecmp
import glob
import importlib
import inspect
import json
import os
import posixpath
import shutil
import signal
import time
import warnings
from collections import Counter
from collections.abc import Mapping, Sequence
from contextlib import nullcontext
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Optional, Union

import fsspec
import requests
import yaml
from fsspec.core import url_to_fs
from huggingface_hub import DatasetCard, DatasetCardData, HfApi
from huggingface_hub.utils import (
    EntryNotFoundError,
    GatedRepoError,
    LocalEntryNotFoundError,
    OfflineModeIsEnabled,
    RepositoryNotFoundError,
    RevisionNotFoundError,
    get_session,
)

from . import __version__, config
from .arrow_dataset import Dataset
from .builder import BuilderConfig, DatasetBuilder
from .data_files import (
    DataFilesDict,
    DataFilesList,
    DataFilesPatternsDict,
    EmptyDatasetError,
    get_data_patterns,
    sanitize_patterns,
)
from .dataset_dict import DatasetDict, IterableDatasetDict
from .download.download_config import DownloadConfig
from .download.download_manager import DownloadMode
from .download.streaming_download_manager import StreamingDownloadManager, xbasename, xglob, xjoin
from .exceptions import DataFilesNotFoundError, DatasetNotFoundError
from .features import Features
from .fingerprint import Hasher
from .info import DatasetInfo, DatasetInfosDict
from .iterable_dataset import IterableDataset
from .naming import camelcase_to_snakecase, snakecase_to_camelcase
from .packaged_modules import (
    _EXTENSION_TO_MODULE,
    _MODULE_TO_EXTENSIONS,
    _MODULE_TO_METADATA_FILE_NAMES,
    _PACKAGED_DATASETS_MODULES,
    _hash_python_lines,
)
from .packaged_modules.folder_based_builder.folder_based_builder import FolderBasedBuilder
from .splits import Split
from .utils import _dataset_viewer
from .utils.file_utils import (
    _raise_if_offline_mode_is_enabled,
    cached_path,
    get_datasets_user_agent,
    init_hf_modules,
    is_relative_path,
    relative_to_absolute_path,
    url_or_path_join,
)
from .utils.hub import hf_dataset_url
from .utils.info_utils import VerificationMode, is_small_dataset
from .utils.logging import get_logger
from .utils.metadata import MetadataConfigs
from .utils.py_utils import get_imports, lock_importable_file
from .utils.typing import PathLike
from .utils.version import Version


logger = get_logger(__name__)

ALL_ALLOWED_EXTENSIONS = list(_EXTENSION_TO_MODULE.keys()) + [".zip"]


def _raise_timeout_error(signum, frame) -> None:
    raise ValueError(
        "Loading this dataset requires you to execute custom code contained in the dataset repository on your local "
        "machine. Please set the option `trust_remote_code=True` to permit loading of this dataset."
    )


def resolve_trust_remote_code(trust_remote_code: Optional[bool], repo_id: str) -> bool:
    """
    Copied and adapted from Transformers
    https://github.com/huggingface/transformers/blob/2098d343cc4b4b9d2aea84b3cf1eb5a1e610deff/src/transformers/dynamic_module_utils.py#L589
    """
    trust_remote_code = trust_remote_code if trust_remote_code is not None else config.HF_DATASETS_TRUST_REMOTE_CODE
    if trust_remote_code is None:
        if config.TIME_OUT_REMOTE_CODE > 0:
            try:
                signal.signal(signal.SIGALRM, _raise_timeout_error)
                signal.alarm(config.TIME_OUT_REMOTE_CODE)
                while trust_remote_code is None:
                    answer = input(
                        f"The repository for {repo_id} contains custom code which must be executed to correctly "
                        f"load the dataset. You can inspect the repository content at https://hf.co/datasets/{repo_id}.\n"
                        f"You can avoid this prompt in future by passing the argument `trust_remote_code=True`.\n\n"
                        f"Do you wish to run the custom code? [y/N] "
                    )
                    if answer.lower() in ["yes", "y", "1"]:
                        trust_remote_code = True
                    elif answer.lower() in ["no", "n", "0", ""]:
                        trust_remote_code = False
                signal.alarm(0)
            except Exception:
                # OS which does not support signal.SIGALRM
                raise ValueError(
                    f"The repository for {repo_id} contains custom code which must be executed to correctly "
                    f"load the dataset. You can inspect the repository content at https://hf.co/datasets/{repo_id}.\n"
                    f"Please pass the argument `trust_remote_code=True` to allow custom code to be run."
                )
        else:
            _raise_timeout_error(-1, None)
    return trust_remote_code


def init_dynamic_modules(
    name: str = config.MODULE_NAME_FOR_DYNAMIC_MODULES, hf_modules_cache: Optional[Union[Path, str]] = None
) -> str:
    """
    Create a module with name `name` in which you can add dynamic modules
    such as datasets. The module can be imported using its name.
    The module is created in the HF_MODULE_CACHE directory by default (~/.cache/huggingface/modules) but it can
    be overridden by specifying a path to another directory in `hf_modules_cache`.
    """
    hf_modules_cache = init_hf_modules(hf_modules_cache)
    dynamic_modules_path = os.path.join(hf_modules_cache, name)
    os.makedirs(dynamic_modules_path, exist_ok=True)
    if not os.path.exists(os.path.join(dynamic_modules_path, "__init__.py")):
        with open(os.path.join(dynamic_modules_path, "__init__.py"), "w"):
            pass
    return dynamic_modules_path


def import_main_class(module_path) -> Optional[type[DatasetBuilder]]:
    """Import a module at module_path and return its main class: a DatasetBuilder"""
    module = importlib.import_module(module_path)
    # Find the main class in our imported module
    module_main_cls = None
    for name, obj in module.__dict__.items():
        if inspect.isclass(obj) and issubclass(obj, DatasetBuilder):
            if inspect.isabstract(obj):
                continue
            module_main_cls = obj
            obj_module = inspect.getmodule(obj)
            if obj_module is not None and module == obj_module:
                break
    return module_main_cls


class _InitializeConfiguredDatasetBuilder:
    """
    From https://stackoverflow.com/questions/4647566/pickle-a-dynamically-parameterized-sub-class
    See also ConfiguredDatasetBuilder.__reduce__
    When called with the param value as the only argument, returns an
    un-initialized instance of the parameterized class. Subsequent __setstate__
    will be called by pickle.
    """

    def __call__(self, builder_cls, metadata_configs, default_config_name, name):
        # make a simple object which has no complex __init__ (this one will do)
        obj = _InitializeConfiguredDatasetBuilder()
        obj.__class__ = configure_builder_class(
            builder_cls, metadata_configs, default_config_name=default_config_name, dataset_name=name
        )
        return obj


def configure_builder_class(
    builder_cls: type[DatasetBuilder],
    builder_configs: list[BuilderConfig],
    default_config_name: Optional[str],
    dataset_name: str,
) -> type[DatasetBuilder]:
    """
    Dynamically create a builder class with custom builder configs parsed from README.md file,
    i.e. set BUILDER_CONFIGS class variable of a builder class to custom configs list.
    """

    class ConfiguredDatasetBuilder(builder_cls):
        BUILDER_CONFIGS = builder_configs
        DEFAULT_CONFIG_NAME = default_config_name
        __module__ = builder_cls.__module__

        def __reduce__(self):  # to make dynamically created class pickable, see _InitializeConfiguredDatasetBuilder
            parent_builder_cls = self.__class__.__mro__[1]
            return (
                _InitializeConfiguredDatasetBuilder(),
                (
                    parent_builder_cls,
                    self.BUILDER_CONFIGS,
                    self.DEFAULT_CONFIG_NAME,
                    self.dataset_name,
                ),
                self.__dict__.copy(),
            )

    ConfiguredDatasetBuilder.__name__ = (
        f"{builder_cls.__name__.lower().capitalize()}{snakecase_to_camelcase(dataset_name)}"
    )
    ConfiguredDatasetBuilder.__qualname__ = (
        f"{builder_cls.__name__.lower().capitalize()}{snakecase_to_camelcase(dataset_name)}"
    )

    return ConfiguredDatasetBuilder


def get_dataset_builder_class(
    dataset_module: "DatasetModule", dataset_name: Optional[str] = None
) -> type[DatasetBuilder]:
    with (
        lock_importable_file(dataset_module.importable_file_path)
        if dataset_module.importable_file_path
        else nullcontext()
    ):
        builder_cls = import_main_class(dataset_module.module_path)
    if dataset_module.builder_configs_parameters.builder_configs:
        dataset_name = dataset_name or dataset_module.builder_kwargs.get("dataset_name")
        if dataset_name is None:
            raise ValueError("dataset_name should be specified but got None")
        builder_cls = configure_builder_class(
            builder_cls,
            builder_configs=dataset_module.builder_configs_parameters.builder_configs,
            default_config_name=dataset_module.builder_configs_parameters.default_config_name,
            dataset_name=dataset_name,
        )
    return builder_cls


def files_to_hash(file_paths: list[str]) -> str:
    """
    Convert a list of scripts or text files provided in file_paths into a hashed filename in a repeatable way.
    """
    # List all python files in directories if directories are supplied as part of external imports
    to_use_files: list[Union[Path, str]] = []
    for file_path in file_paths:
        if os.path.isdir(file_path):
            to_use_files.extend(list(Path(file_path).rglob("*.[pP][yY]")))
        else:
            to_use_files.append(file_path)

    # Get the code from all these files
    lines = []
    for file_path in to_use_files:
        with open(file_path, encoding="utf-8") as f:
            lines.extend(f.readlines())
    return _hash_python_lines(lines)


def increase_load_count(name: str):
    """Update the download count of a dataset."""
    if not config.HF_HUB_OFFLINE and config.HF_UPDATE_DOWNLOAD_COUNTS:
        try:
            get_session().head(
                "/".join((config.S3_DATASETS_BUCKET_PREFIX, name, name + ".py")),
                headers={"User-Agent": get_datasets_user_agent()},
                timeout=3,
            )
        except Exception:
            pass


def _download_additional_modules(
    name: str, base_path: str, imports: list[tuple[str, str, str, str]], download_config: Optional[DownloadConfig]
) -> tuple[list[tuple[str, str]], list[tuple[str, str]]]:
    """
    Download additional module for a module <name>.py at URL (or local path) <base_path>/<name>.py
    The imports must have been parsed first using ``get_imports``.

    If some modules need to be installed with pip, an error is raised showing how to install them.
    This function return the list of downloaded modules as tuples (import_name, module_file_path).

    The downloaded modules can then be moved into an importable directory with
    ``_copy_script_and_other_resources_in_importable_dir``.
    """
    local_imports = []
    library_imports = []
    download_config = download_config.copy()
    if download_config.download_desc is None:
        download_config.download_desc = "Downloading extra modules"
    for import_type, import_name, import_path, sub_directory in imports:
        if import_type == "library":
            library_imports.append((import_name, import_path))  # Import from a library
            continue

        if import_name == name:
            raise ValueError(
                f"Error in the {name} script, importing relative {import_name} module "
                f"but {import_name} is the name of the script. "
                f"Please change relative import {import_name} to another name and add a '# From: URL_OR_PATH' "
                f"comment pointing to the original relative import file path."
            )
        if import_type == "internal":
            url_or_filename = url_or_path_join(base_path, import_path + ".py")
        elif import_type == "external":
            url_or_filename = import_path
        else:
            raise ValueError("Wrong import_type")

        local_import_path = cached_path(url_or_filename, download_config=download_config)
        if sub_directory is not None:
            local_import_path = os.path.join(local_import_path, sub_directory)
        local_imports.append((import_name, local_import_path))
    return local_imports, library_imports


def _check_library_imports(name: str, library_imports: list[tuple[str, str]]) -> None:
    # Check library imports
    needs_to_be_installed = {}
    for library_import_name, library_import_path in library_imports:
        try:
            lib = importlib.import_module(library_import_name)  # noqa F841
        except ImportError:
            if library_import_name not in needs_to_be_installed or library_import_path != library_import_name:
                needs_to_be_installed[library_import_name] = library_import_path
    if needs_to_be_installed:
        _dependencies_str = "dependencies" if len(needs_to_be_installed) > 1 else "dependency"
        _them_str = "them" if len(needs_to_be_installed) > 1 else "it"
        if "sklearn" in needs_to_be_installed.keys():
            needs_to_be_installed["sklearn"] = "scikit-learn"
        if "Bio" in needs_to_be_installed.keys():
            needs_to_be_installed["Bio"] = "biopython"
        raise ImportError(
            f"To be able to use {name}, you need to install the following {_dependencies_str}: "
            f"{', '.join(needs_to_be_installed)}.\nPlease install {_them_str} using 'pip install "
            f"{' '.join(needs_to_be_installed.values())}' for instance."
        )


def _copy_script_and_other_resources_in_importable_dir(
    name: str,
    importable_directory_path: str,
    subdirectory_name: str,
    original_local_path: str,
    local_imports: list[tuple[str, str]],
    additional_files: list[tuple[str, str]],
    download_mode: Optional[Union[DownloadMode, str]],
) -> str:
    """Copy a script and its required imports to an importable directory

    Args:
        name (str): name of the resource to load
        importable_directory_path (str): path to the loadable folder in the dynamic modules directory
        subdirectory_name (str): name of the subdirectory in importable_directory_path in which to place the script
        original_local_path (str): local path to the resource script
        local_imports (list[tuple[str, str]]): list of (destination_filename, import_file_to_copy)
        additional_files (list[tuple[str, str]]): list of (destination_filename, additional_file_to_copy)
        download_mode (Optional[Union[DownloadMode, str]]): download mode

    Return:
        importable_file: path to an importable module with importlib.import_module
    """
    ...


def _get_importable_file_path(
    dynamic_modules_path: str,
    module_namespace: str,
    subdirectory_name: str,
    name: str,
) -> str:
    importable_directory_path = os.path.join(dynamic_modules_path, module_namespace, name.replace("/", "--"))
    return os.path.join(importable_directory_path, subdirectory_name, name.split("/")[-1] + ".py")


def _create_importable_file(
    local_path: str,
    local_imports: list[tuple[str, str]],
    additional_files: list[tuple[str, str]],
    dynamic_modules_path: str,
    module_namespace: str,
    subdirectory_name: str,
    name: str,
    download_mode: DownloadMode,
) -> None:
    importable_directory_path = os.path.join(dynamic_modules_path, module_namespace, name.replace("/", "--"))
    Path(importable_directory_path).mkdir(parents=True, exist_ok=True)
    (Path(importable_directory_path).parent / "__init__.py").touch(exist_ok=True)
    importable_local_file = _copy_script_and_other_resources_in_importable_dir(
        name=name.split("/")[-1],
        importable_directory_path=importable_directory_path,
        subdirectory_name=subdirectory_name,
        original_local_path=local_path,
        local_imports=local_imports,
        additional_files=additional_files,
        download_mode=download_mode,
    )
    logger.debug(f"Created importable dataset file at {importable_local_file}")


def _load_importable_file(
    dynamic_modules_path: str,
    module_namespace: str,
    subdirectory_name: str,
    name: str,
) -> tuple[str, str]:
    module_path = ".".join(
        [
            os.path.basename(dynamic_modules_path),
            module_namespace,
            name.replace("/", "--"),
            subdirectory_name,
            name.split("/")[-1],
        ]
    )
    return module_path, subdirectory_name


def infer_module_for_data_files_list(
    data_files_list: DataFilesList, download_config: Optional[DownloadConfig] = None
) -> tuple[Optional[str], dict[str, Any]]:
    """Infer module (and builder kwargs) from list of data files.

    It picks the module based on the most common file extension.
    In case of a draw ".parquet" is the favorite, and then alphabetical order.

    Args:
        data_files_list (DataFilesList): List of data files.
        download_config (bool or str, optional): Mainly use `token` or `storage_options` to support different platforms and auth types.

    Returns:
        tuple[str, dict[str, Any]]: Tuple with

            - inferred module name
            - dict of builder kwargs
    """
    extensions_counter = Counter(
        ("." + suffix.lower(), xbasename(filepath) in FolderBasedBuilder.METADATA_FILENAMES)
        for filepath in data_files_list[: config.DATA_FILES_MAX_NUMBER_FOR_MODULE_INFERENCE]
        for suffix in xbasename(filepath).split(".")[1:]
    )
    if extensions_counter:

        def sort_key(ext_count: tuple[tuple[str, bool], int]) -> tuple[int, bool]:
            """Sort by count and set ".parquet" as the favorite in case of a draw, and ignore metadata files"""
            (ext, is_metadata), count = ext_count
            return (not is_metadata, count, ext == ".parquet", ext == ".jsonl", ext == ".json", ext == ".csv", ext)

        for (ext, _), _ in sorted(extensions_counter.items(), key=sort_key, reverse=True):
            if ext in _EXTENSION_TO_MODULE:
                return _EXTENSION_TO_MODULE[ext]
            elif ext == ".zip":
                return infer_module_for_data_files_list_in_archives(data_files_list, download_config=download_config)
    return None, {}


def infer_module_for_data_files_list_in_archives(
    data_files_list: DataFilesList, download_config: Optional[DownloadConfig] = None
) -> tuple[Optional[str], dict[str, Any]]:
    """Infer module (and builder kwargs) from list of archive data files.

    Args:
        data_files_list (DataFilesList): List of data files.
        download_config (bool or str, optional): Mainly use `token` or `storage_options` to support different platforms and auth types.

    Returns:
        tuple[str, dict[str, Any]]: Tuple with

            - inferred module name
            - dict of builder kwargs
    """
    archived_files = []
    archive_files_counter = 0
    for filepath in data_files_list:
        if str(filepath).endswith(".zip"):
            archive_files_counter += 1
            if archive_files_counter > config.GLOBBED_DATA_FILES_MAX_NUMBER_FOR_MODULE_INFERENCE:
                break
            extracted = xjoin(StreamingDownloadManager().extract(filepath), "**")
            archived_files += [
                f.split("::")[0]
                for f in xglob(extracted, recursive=True, download_config=download_config)[
                    : config.ARCHIVED_DATA_FILES_MAX_NUMBER_FOR_MODULE_INFERENCE
                ]
            ]
    extensions_counter = Counter(
        "." + suffix.lower() for filepath in archived_files for suffix in xbasename(filepath).split(".")[1:]
    )
    if extensions_counter:
        most_common = extensions_counter.most_common(1)[0][0]
        if most_common in _EXTENSION_TO_MODULE:
            return _EXTENSION_TO_MODULE[most_common]
    return None, {}


def infer_module_for_data_files(
    data_files: DataFilesDict, path: Optional[str] = None, download_config: Optional[DownloadConfig] = None
) -> tuple[Optional[str], dict[str, Any]]:
    """Infer module (and builder kwargs) from data files. Raise if module names for different splits don't match.

    Args:
        data_files ([`DataFilesDict`]): Dict of list of data files.
        path (str, *optional*): Dataset name or path.
        download_config ([`DownloadConfig`], *optional*):
            Specific download configuration parameters to authenticate on the Hugging Face Hub for private remote files.

    Returns:
        tuple[str, dict[str, Any]]: Tuple with

            - inferred module name
            - builder kwargs
    """
    split_modules = {
        split: infer_module_for_data_files_list(data_files_list, download_config=download_config)
        for split, data_files_list in data_files.items()
    }
    module_name, default_builder_kwargs = next(iter(split_modules.values()))
    if any((module_name, default_builder_kwargs) != split_module for split_module in split_modules.values()):
        raise ValueError(f"Couldn't infer the same data file format for all splits. Got {split_modules}")
    if not module_name:
        raise DataFilesNotFoundError("No (supported) data files found" + (f" in {path}" if path else ""))
    return module_name, default_builder_kwargs


def create_builder_configs_from_metadata_configs(
    module_path: str,
    metadata_configs: MetadataConfigs,
    base_path: Optional[str] = None,
    default_builder_kwargs: Optional[dict[str, Any]] = None,
    download_config: Optional[DownloadConfig] = None,
) -> tuple[list[BuilderConfig], str]:
    builder_cls = import_main_class(module_path)
    builder_config_cls = builder_cls.BUILDER_CONFIG_CLASS
    default_config_name = metadata_configs.get_default_config_name()
    builder_configs = []
    default_builder_kwargs = {} if default_builder_kwargs is None else default_builder_kwargs

    base_path = base_path if base_path is not None else ""
    for config_name, config_params in metadata_configs.items():
        config_data_files = config_params.get("data_files")
        config_data_dir = config_params.get("data_dir")
        config_base_path = xjoin(base_path, config_data_dir) if config_data_dir else base_path
        try:
            config_patterns = (
                sanitize_patterns(config_data_files)
                if config_data_files is not None
                else get_data_patterns(config_base_path, download_config=download_config)
            )
            config_data_files_dict = DataFilesPatternsDict.from_patterns(
                config_patterns,
                allowed_extensions=ALL_ALLOWED_EXTENSIONS,
            )
        except EmptyDatasetError as e:
            raise EmptyDatasetError(
                f"Dataset at '{base_path}' doesn't contain data files matching the patterns for config '{config_name}',"
                f" check `data_files` and `data_dir` parameters in the `configs` YAML field in README.md. "
            ) from e
        ignored_params = [
            param for param in config_params if not hasattr(builder_config_cls, param) and param != "default"
        ]
        if ignored_params:
            logger.warning(
                f"Some datasets params were ignored: {ignored_params}. "
                "Make sure to use only valid params for the dataset builder and to have an up-to-date version "
                "of the `datasets` library."
            )
        builder_configs.append(
            builder_config_cls(
                name=config_name,
                data_files=config_data_files_dict,
                data_dir=config_data_dir,
                **{
                    param: value
                    for param, value in config_params.items()
                    if hasattr(builder_config_cls, param) and param not in ("default", "data_files", "data_dir")
                },
            )
        )
    return builder_configs, default_config_name
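

# Illustrative sketch (not part of the original module): the inference helpers above
# boil down to counting file extensions and breaking ties in favor of ".parquet".
# A hypothetical, minimal standalone version over plain file names:
def _sketch_most_common_extension(filenames: list[str]) -> Optional[str]:
    counts = Counter(Path(name).suffix.lower() for name in filenames if Path(name).suffix)
    if not counts:
        return None
    # Highest count wins; ".parquet" wins a draw; remaining ties fall back to alphabetical order.
    return max(sorted(counts), key=lambda ext: (counts[ext], ext == ".parquet"))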


@dataclass
class BuilderConfigsParameters:
    """Dataclass containing objects related to creation of builder configurations from yaml's metadata content.

    Attributes:
        metadata_configs (`MetadataConfigs`, *optional*):
            Configs parsed from yaml's metadata.
        builder_configs (`list[BuilderConfig]`, *optional*):
            List of BuilderConfig objects created from metadata_configs above.
        default_config_name (`str`):
            Name of default config taken from yaml's metadata.
    """

    metadata_configs: Optional[MetadataConfigs] = None
    builder_configs: Optional[list[BuilderConfig]] = None
    default_config_name: Optional[str] = None


@dataclass
class DatasetModule:
    module_path: str
    hash: str
    builder_kwargs: dict
    builder_configs_parameters: BuilderConfigsParameters = field(default_factory=BuilderConfigsParameters)
    dataset_infos: Optional[DatasetInfosDict] = None
    importable_file_path: Optional[str] = None


class _DatasetModuleFactory:
    def get_module(self) -> DatasetModule:
        raise NotImplementedError


class LocalDatasetModuleFactoryWithScript(_DatasetModuleFactory):
    """Get the module of a local dataset. The dataset script is loaded from a local script."""

    def __init__(
        self,
        path: str,
        download_config: Optional[DownloadConfig] = None,
        download_mode: Optional[Union[DownloadMode, str]] = None,
        dynamic_modules_path: Optional[str] = None,
        trust_remote_code: Optional[bool] = None,
    ):
        self.path = path
        self.name = Path(path).stem
        self.download_config = download_config or DownloadConfig()
        self.download_mode = download_mode
        self.dynamic_modules_path = dynamic_modules_path
        self.trust_remote_code = trust_remote_code

    def get_module(self) -> DatasetModule:
        ...


class LocalDatasetModuleFactoryWithoutScript(_DatasetModuleFactory):
    """Get the module of a dataset loaded from the user's data files. The dataset builder module to use is inferred
    from the data files extensions."""

    def __init__(
        self,
        path: str,
        data_dir: Optional[str] = None,
        data_files: Optional[Union[str, list, dict]] = None,
        download_mode: Optional[Union[DownloadMode, str]] = None,
    ):
        if data_dir and os.path.isabs(data_dir):
            raise ValueError(f"`data_dir` must be relative to a dataset directory's root: {path}")
        self.path = Path(path).as_posix()
        self.name = Path(path).stem
        self.data_files = data_files
        self.data_dir = data_dir
        self.download_mode = download_mode

    def get_module(self) -> DatasetModule:
        ...


class PackagedDatasetModuleFactory(_DatasetModuleFactory):
    """Get the dataset builder module from the ones that are packaged with the library: csv, json, etc."""

    def __init__(
        self,
        name: str,
        data_dir: Optional[str] = None,
        data_files: Optional[Union[str, list, dict]] = None,
        download_config: Optional[DownloadConfig] = None,
        download_mode: Optional[Union[DownloadMode, str]] = None,
    ):
        self.name = name
        self.data_files = data_files
        self.data_dir = data_dir
        self.download_config = download_config
        self.download_mode = download_mode
        increase_load_count(name)

    def get_module(self) -> DatasetModule:
        base_path = Path(self.data_dir or "").expanduser().resolve().as_posix()
        patterns = (
            sanitize_patterns(self.data_files)
            if self.data_files is not None
            else get_data_patterns(base_path, download_config=self.download_config)
        )
        data_files = DataFilesDict.from_patterns(
            patterns,
            download_config=self.download_config,
            base_path=base_path,
        )
        module_path, hash = _PACKAGED_DATASETS_MODULES[self.name]
        builder_kwargs = {"data_files": data_files, "dataset_name": self.name}
        return DatasetModule(module_path, hash, builder_kwargs)


class HubDatasetModuleFactoryWithoutScript(_DatasetModuleFactory):
    """
    Get the module of a dataset loaded from data files of a dataset repository.
    The dataset builder module to use is inferred from the data files extensions.
    """

    def __init__(
        self,
        name: str,
        commit_hash: str,
        data_dir: Optional[str] = None,
        data_files: Optional[Union[str, list, dict]] = None,
        download_config: Optional[DownloadConfig] = None,
        download_mode: Optional[Union[DownloadMode, str]] = None,
        use_exported_dataset_infos: bool = False,
    ):
        self.name = name
        self.commit_hash = commit_hash
        self.data_files = data_files
        self.data_dir = data_dir
        self.download_config = download_config or DownloadConfig()
        self.download_mode = download_mode
        self.use_exported_dataset_infos = use_exported_dataset_infos
        increase_load_count(name)

    def get_module(self) -> DatasetModule:
        ...


class HubDatasetModuleFactoryWithParquetExport(_DatasetModuleFactory):
    """
    Get the module of a dataset loaded from parquet files of a dataset repository parquet export.
    """

    def __init__(
        self,
        name: str,
        commit_hash: str,
        download_config: Optional[DownloadConfig] = None,
    ):
        self.name = name
        self.commit_hash = commit_hash
        self.download_config = download_config or DownloadConfig()
        increase_load_count(name)

    def get_module(self) -> DatasetModule:
        ...


class HubDatasetModuleFactoryWithScript(_DatasetModuleFactory):
    """
    Get the module of a dataset from a dataset repository.
    The dataset script comes from the script inside the dataset repository.
    """

    def __init__(
        self,
        name: str,
        commit_hash: str,
        download_config: Optional[DownloadConfig] = None,
        download_mode: Optional[Union[DownloadMode, str]] = None,
        dynamic_modules_path: Optional[str] = None,
        trust_remote_code: Optional[bool] = None,
    ):
        self.name = name
        self.commit_hash = commit_hash
        self.download_config = download_config or DownloadConfig()
        self.download_mode = download_mode
        self.dynamic_modules_path = dynamic_modules_path
        self.trust_remote_code = trust_remote_code
        increase_load_count(name)

    def download_loading_script(self) -> str:
        file_path = hf_dataset_url(self.name, self.name.split("/")[-1] + ".py", revision=self.commit_hash)
        download_config = self.download_config.copy()
        if download_config.download_desc is None:
            download_config.download_desc = "Downloading builder script"
        return cached_path(file_path, download_config=download_config)

    def download_dataset_infos_file(self) -> Optional[str]:
        dataset_infos = hf_dataset_url(self.name, config.DATASETDICT_INFOS_FILENAME, revision=self.commit_hash)
        # Download the dataset infos file if available
        download_config = self.download_config.copy()
        if download_config.download_desc is None:
            download_config.download_desc = "Downloading metadata"
        try:
            return cached_path(dataset_infos, download_config=download_config)
        except (FileNotFoundError, ConnectionError):
            return None

    def download_dataset_readme_file(self) -> Optional[str]:
        readme_url = hf_dataset_url(self.name, config.REPOCARD_FILENAME, revision=self.commit_hash)
        # Download the dataset readme file if available
        download_config = self.download_config.copy()
        if download_config.download_desc is None:
            download_config.download_desc = "Downloading readme"
        try:
            return cached_path(readme_url, download_config=download_config)
        except (FileNotFoundError, ConnectionError):
            return None

    def get_module(self) -> DatasetModule:
        ...


class CachedDatasetModuleFactory(_DatasetModuleFactory):
    """
    Get the module of a dataset that has been loaded once already and cached.
    The script that is loaded from the cache is the most recent one with a matching name.
    """

    def __init__(
        self,
        name: str,
        cache_dir: Optional[str] = None,
        dynamic_modules_path: Optional[str] = None,
    ):
        self.name = name
        self.cache_dir = cache_dir
        self.dynamic_modules_path = dynamic_modules_path
        assert self.name.count("/") <= 1

    def get_module(self) -> DatasetModule:
        ...


def dataset_module_factory(
    path: str,
    revision: Optional[Union[str, Version]] = None,
    download_config: Optional[DownloadConfig] = None,
    download_mode: Optional[Union[DownloadMode, str]] = None,
    dynamic_modules_path: Optional[str] = None,
    data_dir: Optional[str] = None,
    data_files: Optional[Union[dict, list, str, DataFilesDict]] = None,
    cache_dir: Optional[str] = None,
    trust_remote_code: Optional[bool] = None,
    _require_default_config_name=True,
    _require_custom_configs=False,
    **download_kwargs,
) -> DatasetModule:
    """
    Download/extract/cache a dataset module.

    Dataset codes are cached inside the dynamic modules cache to allow easy import (avoid ugly sys.path tweaks).

    Args:

        path (str): Path or name of the dataset.
            Depending on ``path``, the dataset builder that is used comes from a generic dataset script (JSON, CSV,
            Parquet, text etc.) or from the dataset script (a python file) inside the dataset directory.

            For local datasets:

            - if ``path`` is a local directory (containing data files only)
              -> load a generic dataset builder (csv, json, text etc.) based on the content of the directory
              e.g. ``'./path/to/directory/with/my/csv/data'``.
            - if ``path`` is a local dataset script or a directory containing a local dataset script (if the script has the same name as the directory):
              -> load the dataset builder from the dataset script
              e.g. ``'./dataset/squad'`` or ``'./dataset/squad/squad.py'``.

            For datasets on the Hugging Face Hub (list all available datasets with ``huggingface_hub.list_datasets()``)

            - if ``path`` is a dataset repository on the HF hub (containing data files only)
              -> load a generic dataset builder (csv, text etc.) based on the content of the repository
              e.g. ``'username/dataset_name'``, a dataset repository on the HF hub containing your data files.
            - if ``path`` is a dataset repository on the HF hub with a dataset script (if the script has the same name as the directory)
              -> load the dataset builder from the dataset script in the dataset repository
              e.g. ``glue``, ``squad``, ``'username/dataset_name'``, a dataset repository on the HF hub containing a dataset script `'dataset_name.py'`.

        revision (:class:`~utils.Version` or :obj:`str`, optional): Version of the dataset script to load.
            As datasets have their own git repository on the Datasets Hub, the default version "main" corresponds to their "main" branch.
            You can specify a different version than the default "main" by using a commit SHA or a git tag of the dataset repository.
        download_config (:class:`DownloadConfig`, optional): Specific download configuration parameters.
        download_mode (:class:`DownloadMode` or :obj:`str`, default ``REUSE_DATASET_IF_EXISTS``): Download/generate mode.
        dynamic_modules_path (Optional str, defaults to HF_MODULES_CACHE / "datasets_modules", i.e. ~/.cache/huggingface/modules/datasets_modules):
            Optional path to the directory in which the dynamic modules are saved. It must have been initialized with :obj:`init_dynamic_modules`.
            By default, the datasets are stored inside the `datasets_modules` module.
        data_dir (:obj:`str`, optional): Directory with the data files. Used only if `data_files` is not specified,
            in which case it's equal to passing `os.path.join(data_dir, "**")` as `data_files`.
        data_files (:obj:`Union[Dict, List, str]`, optional): Defining the data_files of the dataset configuration.
        cache_dir (`str`, *optional*): Directory to read/write data. Defaults to `"~/.cache/huggingface/datasets"`.

            <Added version="2.16.0"/>
        trust_remote_code (`bool`, *optional*, defaults to `None`):
            Whether or not to allow for datasets defined on the Hub using a dataset script. This option
            should only be set to `True` for repositories you trust and in which you have read the code, as it will
            execute code present on the Hub on your local machine.

            <Added version="2.16.0"/>

            <Changed version="2.20.0">

            `trust_remote_code` defaults to `False` if not specified.

            </Changed>
        **download_kwargs (additional keyword arguments): optional attributes for DownloadConfig() which will override
            the attributes in download_config if supplied.

    Returns:
        DatasetModule
    """
    ...
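

# Usage sketch (assumption: called from user code, not at import time, and not part
# of the original module): resolving a dataset repo id or local path to a
# DatasetBuilder subclass via the factory above.
def _sketch_resolve_builder_class(path: str) -> type[DatasetBuilder]:
    dataset_module = dataset_module_factory(path)
    # dataset_name falls back to the last path component, mirroring Hub repo id naming
    return get_dataset_builder_class(dataset_module, dataset_name=path.split("/")[-1])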


def load_dataset_builder(
    path: str,
    name: Optional[str] = None,
    data_dir: Optional[str] = None,
    data_files: Optional[Union[str, Sequence[str], Mapping[str, Union[str, Sequence[str]]]]] = None,
    cache_dir: Optional[str] = None,
    features: Optional[Features] = None,
    download_config: Optional[DownloadConfig] = None,
    download_mode: Optional[Union[DownloadMode, str]] = None,
    revision: Optional[Union[str, Version]] = None,
    token: Optional[Union[bool, str]] = None,
    storage_options: Optional[dict] = None,
    trust_remote_code: Optional[bool] = None,
    _require_default_config_name=True,
    **config_kwargs,
) -> DatasetBuilder:
    """Load a dataset builder which can be used to:

    - Inspect general information that is required to build a dataset (cache directory, config, dataset info, features, data files, etc.)
    - Download and prepare the dataset as Arrow files in the cache
    - Get a streaming dataset without downloading or caching anything

    You can find the list of datasets on the [Hub](https://huggingface.co/datasets) or with [`huggingface_hub.list_datasets`].

    A dataset is a directory that contains some data files in generic formats (JSON, CSV, Parquet, etc.) and possibly
    in a generic structure (Webdataset, ImageFolder, AudioFolder, VideoFolder, etc.)

    Args:
        path (`str`):
            Path or name of the dataset.

            - if `path` is a dataset repository on the HF hub (list all available datasets with [`huggingface_hub.list_datasets`])
              -> load the dataset builder from supported files in the repository (csv, json, parquet, etc.)
              e.g. `'username/dataset_name'`, a dataset repository on the HF hub containing the data files.
            - if `path` is a local directory
              -> load the dataset builder from supported files in the directory (csv, json, parquet, etc.)
              e.g. `'./path/to/directory/with/my/csv/data'`.
            - if `path` is the name of a dataset builder and `data_files` or `data_dir` is specified
              (available builders are "json", "csv", "parquet", "arrow", "text", "xml", "webdataset", "imagefolder", "audiofolder", "videofolder")
              -> load the dataset builder from the files in `data_files` or `data_dir`
              e.g. `'parquet'`.

            It can also point to a local dataset script but this is not recommended.
        name (`str`, *optional*):
            Defining the name of the dataset configuration.
        data_dir (`str`, *optional*):
            Defining the `data_dir` of the dataset configuration. If specified for the generic builders (csv, text etc.) or
            the Hub datasets and `data_files` is `None`, the behavior is equal to passing `os.path.join(data_dir, **)` as
            `data_files` to reference all the files in a directory.
        data_files (`str` or `Sequence` or `Mapping`, *optional*):
            Path(s) to source data file(s).
        cache_dir (`str`, *optional*):
            Directory to read/write data. Defaults to `"~/.cache/huggingface/datasets"`.
        features ([`Features`], *optional*):
            Set the features type to use for this dataset.
        download_config ([`DownloadConfig`], *optional*):
            Specific download configuration parameters.
        download_mode ([`DownloadMode`] or `str`, defaults to `REUSE_DATASET_IF_EXISTS`):
            Download/generate mode.
        revision ([`Version`] or `str`, *optional*):
            Version of the dataset script to load.
            As datasets have their own git repository on the Datasets Hub, the default version "main" corresponds to their "main" branch.
            You can specify a different version than the default "main" by using a commit SHA or a git tag of the dataset repository.
        token (`str` or `bool`, *optional*):
            Optional string or boolean to use as Bearer token for remote files on the Datasets Hub.
            If `True`, or not specified, will get token from `"~/.huggingface"`.
        storage_options (`dict`, *optional*, defaults to `None`):
            **Experimental**. Key/value pairs to be passed on to the dataset file-system backend, if any.

            <Added version="2.11.0"/>
        trust_remote_code (`bool`, *optional*, defaults to `None`):
            Whether or not to allow for datasets defined on the Hub using a dataset script. This option
            should only be set to `True` for repositories you trust and in which you have read the code, as it will
            execute code present on the Hub on your local machine.

            <Added version="2.16.0"/>

            <Changed version="2.20.0">

            `trust_remote_code` defaults to `False` if not specified.

            </Changed>
        **config_kwargs (additional keyword arguments):
            Keyword arguments to be passed to the [`BuilderConfig`]
            and used in the [`DatasetBuilder`].

    Returns:
        [`DatasetBuilder`]

    Example:

    ```py
    >>> from datasets import load_dataset_builder
    >>> ds_builder = load_dataset_builder('cornell-movie-review-data/rotten_tomatoes')
    >>> ds_builder.info.features
    {'label': ClassLabel(names=['neg', 'pos'], id=None),
     'text': Value(dtype='string', id=None)}
    ```
    """
    download_mode = DownloadMode(download_mode or DownloadMode.REUSE_DATASET_IF_EXISTS)
    if token is not None:
        download_config = download_config.copy() if download_config else DownloadConfig()
        download_config.token = token
    if storage_options is not None:
        download_config = download_config.copy() if download_config else DownloadConfig()
        download_config.storage_options.update(storage_options)
    dataset_module = dataset_module_factory(
        path,
        revision=revision,
        download_config=download_config,
        download_mode=download_mode,
        data_dir=data_dir,
        data_files=data_files,
        cache_dir=cache_dir,
        trust_remote_code=trust_remote_code,
        _require_default_config_name=_require_default_config_name,
        _require_custom_configs=bool(config_kwargs),
    )
    # Get dataset builder class
    builder_kwargs = dataset_module.builder_kwargs
    data_dir = builder_kwargs.pop("data_dir", data_dir)
    data_files = builder_kwargs.pop("data_files", data_files)
    config_name = builder_kwargs.pop(
        "config_name", name or dataset_module.builder_configs_parameters.default_config_name
    )
    dataset_name = builder_kwargs.pop("dataset_name", None)
    info = dataset_module.dataset_infos.get(config_name) if dataset_module.dataset_infos else None

    if (
        path in _PACKAGED_DATASETS_MODULES
        and data_files is None
        and dataset_module.builder_configs_parameters.builder_configs[0].data_files is None
    ):
        error_msg = f"Please specify the data files or data directory to load for the {path} dataset builder."
        example_extensions = [
            extension for extension in _EXTENSION_TO_MODULE if _EXTENSION_TO_MODULE[extension] == path
        ]
        if example_extensions:
            error_msg += f'\nFor example `data_files={{"train": "path/to/data/train/*.{example_extensions[0]}"}}`'
        raise ValueError(error_msg)

    builder_cls = get_dataset_builder_class(dataset_module, dataset_name=dataset_name)
    # Instantiate the dataset builder
    builder_instance: DatasetBuilder = builder_cls(
        cache_dir=cache_dir,
        dataset_name=dataset_name,
        config_name=config_name,
        data_dir=data_dir,
        data_files=data_files,
        hash=dataset_module.hash,
        info=info,
        features=features,
        token=token,
        storage_options=storage_options,
        **builder_kwargs,
        **config_kwargs,
    )
    builder_instance._use_legacy_cache_dir_if_possible(dataset_module)

    return builder_instance


def load_dataset(
    path: str,
    name: Optional[str] = None,
    data_dir: Optional[str] = None,
    data_files: Optional[Union[str, Sequence[str], Mapping[str, Union[str, Sequence[str]]]]] = None,
    split: Optional[Union[str, Split]] = None,
    cache_dir: Optional[str] = None,
    features: Optional[Features] = None,
    download_config: Optional[DownloadConfig] = None,
    download_mode: Optional[Union[DownloadMode, str]] = None,
    verification_mode: Optional[Union[VerificationMode, str]] = None,
    keep_in_memory: Optional[bool] = None,
    save_infos: bool = False,
    revision: Optional[Union[str, Version]] = None,
    token: Optional[Union[bool, str]] = None,
    streaming: bool = False,
    num_proc: Optional[int] = None,
    storage_options: Optional[dict] = None,
    trust_remote_code: Optional[bool] = None,
    **config_kwargs,
) -> Union[DatasetDict, Dataset, IterableDatasetDict, IterableDataset]:
    """Load a dataset from the Hugging Face Hub, or a local dataset.

    You can find the list of datasets on the [Hub](https://huggingface.co/datasets) or with [`huggingface_hub.list_datasets`].

    A dataset is a directory that contains some data files in generic formats (JSON, CSV, Parquet, etc.) and possibly
    in a generic structure (Webdataset, ImageFolder, AudioFolder, VideoFolder, etc.)

    This function does the following under the hood:

        1. Load a dataset builder:

            * Find the most common data format in the dataset and pick its associated builder (JSON, CSV, Parquet, Webdataset, ImageFolder, AudioFolder, etc.)
            * Find which file goes into which split (e.g. train/test) based on file and directory names or on the YAML configuration
            * It is also possible to specify `data_files` manually, and which dataset builder to use (e.g. "parquet").

        2. Run the dataset builder:

            In the general case:

            * Download the data files from the dataset if they are not already available locally or cached.
            * Process and cache the dataset in typed Arrow tables for caching.

                Arrow tables are arbitrarily long, typed tables which can store nested objects and be mapped to numpy/pandas/python generic types.
                They can be directly accessed from disk, loaded in RAM or even streamed over the web.

            In the streaming case:

            * Don't download or cache anything. Instead, the dataset is lazily loaded and will be streamed on-the-fly when iterating on it.

        3. Return a dataset built from the requested splits in `split` (default: all).

    It can also use a custom dataset builder if the dataset contains a dataset script, but this feature is mostly for backward compatibility.
    In this case the dataset script file must be named after the dataset repository or directory and end with ".py".

    Args:
        path (`str`):
            Path or name of the dataset.

            - if `path` is a dataset repository on the HF hub (list all available datasets with [`huggingface_hub.list_datasets`])
              -> load the dataset from supported files in the repository (csv, json, parquet, etc.)
              e.g. `'username/dataset_name'`, a dataset repository on the HF hub containing the data files.
            - if `path` is a local directory
              -> load the dataset from supported files in the directory (csv, json, parquet, etc.)
              e.g. `'./path/to/directory/with/my/csv/data'`.
            - if `path` is the name of a dataset builder and `data_files` or `data_dir` is specified
              (available builders are "json", "csv", "parquet", "arrow", "text", "xml", "webdataset", "imagefolder", "audiofolder", "videofolder")
              -> load the dataset from the files in `data_files` or `data_dir`
              e.g. `'parquet'`.

            It can also point to a local dataset script but this is not recommended.
        name (`str`, *optional*):
            Defining the name of the dataset configuration.
        data_dir (`str`, *optional*):
            Defining the `data_dir` of the dataset configuration. If specified for the generic builders (csv, text etc.) or
            the Hub datasets and `data_files` is `None`, the behavior is equal to passing `os.path.join(data_dir, **)` as
            `data_files` to reference all the files in a directory.
        data_files (`str` or `Sequence` or `Mapping`, *optional*):
            Path(s) to source data file(s).
        split (`Split` or `str`):
            Which split of the data to load.
            If `None`, will return a `dict` with all splits (typically `datasets.Split.TRAIN` and `datasets.Split.TEST`).
            If given, will return a single Dataset.
            Splits can be combined and specified like in tensorflow-datasets.
        cache_dir (`str`, *optional*):
            Directory to read/write data. Defaults to `"~/.cache/huggingface/datasets"`.
        features (`Features`, *optional*):
            Set the features type to use for this dataset.
        download_config ([`DownloadConfig`], *optional*):
            Specific download configuration parameters.
        download_mode ([`DownloadMode`] or `str`, defaults to `REUSE_DATASET_IF_EXISTS`):
            Download/generate mode.
        verification_mode ([`VerificationMode`] or `str`, defaults to `BASIC_CHECKS`):
            Verification mode determining the checks to run on the downloaded/processed dataset information (checksums/size/splits/...).

            <Added version="2.9.1"/>
        keep_in_memory (`bool`, defaults to `None`):
            Whether to copy the dataset in-memory. If `None`, the dataset
            will not be copied in-memory unless explicitly enabled by setting `datasets.config.IN_MEMORY_MAX_SIZE` to
            nonzero. See more details in the [improve performance](../cache#improve-performance) section.
        save_infos (`bool`, defaults to `False`):
            Save the dataset information (checksums/size/splits/...).
        revision ([`Version`] or `str`, *optional*):
            Version of the dataset script to load.
            As datasets have their own git repository on the Datasets Hub, the default version "main" corresponds to their "main" branch.
            You can specify a different version than the default "main" by using a commit SHA or a git tag of the dataset repository.
        token (`str` or `bool`, *optional*):
            Optional string or boolean to use as Bearer token for remote files on the Datasets Hub.
            If `True`, or not specified, will get token from `"~/.huggingface"`.
        streaming (`bool`, defaults to `False`):
            If set to `True`, don't download the data files. Instead, it streams the data progressively while
            iterating on the dataset. An [`IterableDataset`] or [`IterableDatasetDict`] is returned instead in this case.

            Note that streaming works for datasets that use data formats that support being iterated over like txt, csv, jsonl for example.
            Json files may be downloaded completely. Also streaming from remote zip or gzip files is supported but other compressed formats
            like rar and xz are not yet supported. The tgz format doesn't allow streaming.
        num_proc (`int`, *optional*, defaults to `None`):
            Number of processes when downloading and generating the dataset locally.
            Multiprocessing is disabled by default.

            <Added version="2.7.0"/>
        storage_options (`dict`, *optional*, defaults to `None`):
            **Experimental**. Key/value pairs to be passed on to the dataset file-system backend, if any.

            <Added version="2.11.0"/>
        trust_remote_code (`bool`, *optional*, defaults to `None`):
            Whether or not to allow for datasets defined on the Hub using a dataset script. This option
            should only be set to `True` for repositories you trust and in which you have read the code, as it will
            execute code present on the Hub on your local machine.

            <Added version="2.16.0"/>

            <Changed version="2.20.0">

            `trust_remote_code` defaults to `False` if not specified.

            </Changed>
        **config_kwargs (additional keyword arguments):
            Keyword arguments to be passed to the `BuilderConfig`
            and used in the [`DatasetBuilder`].

    Returns:
        [`Dataset`] or [`DatasetDict`]:
        - if `split` is not `None`: the dataset requested,
        - if `split` is `None`, a [`~datasets.DatasetDict`] with each split.

        or [`IterableDataset`] or [`IterableDatasetDict`]: if `streaming=True`

        - if `split` is not `None`, the dataset is requested
        - if `split` is `None`, a [`~datasets.streaming.IterableDatasetDict`] with each split.

    Example:

    Load a dataset from the Hugging Face Hub:

    ```py
    >>> from datasets import load_dataset
    >>> ds = load_dataset('cornell-movie-review-data/rotten_tomatoes', split='train')

    # Load a subset or dataset configuration (here 'sst2')
    >>> from datasets import load_dataset
    >>> ds = load_dataset('nyu-mll/glue', 'sst2', split='train')

    # Manual mapping of data files to splits
    >>> data_files = {'train': 'train.csv', 'test': 'test.csv'}
    >>> ds = load_dataset('namespace/your_dataset_name', data_files=data_files)

    # Manual selection of a directory to load
    >>> ds = load_dataset('namespace/your_dataset_name', data_dir='folder_name')
    ```

    Load a local dataset:

    ```py
    # Load a CSV file
    >>> from datasets import load_dataset
    >>> ds = load_dataset('csv', data_files='path/to/local/my_dataset.csv')

    # Load a JSON file
    >>> from datasets import load_dataset
    >>> ds = load_dataset('json', data_files='path/to/local/my_dataset.json')

    # Load from a local loading script (not recommended)
    >>> from datasets import load_dataset
    >>> ds = load_dataset('path/to/local/loading_script/loading_script.py', split='train')
    ```

    Load an [`~datasets.IterableDataset`]:

    ```py
    >>> from datasets import load_dataset
    >>> ds = load_dataset('cornell-movie-review-data/rotten_tomatoes', split='train', streaming=True)
    ```

    Load an image dataset with the `ImageFolder` dataset builder:

    ```py
    >>> from datasets import load_dataset
    >>> ds = load_dataset('imagefolder', data_dir='/path/to/images', split='train')
    ```
    """
    if data_files is not None and not data_files:
        raise ValueError(f"Empty 'data_files': '{data_files}'. It should be either non-empty or None (default).")
    if Path(path, config.DATASET_STATE_JSON_FILENAME).exists():
        raise ValueError(
            "You are trying to load a dataset that was saved using `save_to_disk`. "
            "Please use `load_from_disk` instead."
        )

    if streaming and num_proc is not None:
        raise NotImplementedError(
            "Loading a streaming dataset in parallel with `num_proc` is not implemented. "
            "To parallelize streaming, you can wrap the dataset with a PyTorch DataLoader using `num_workers` > 1 instead."
        )

    download_mode = DownloadMode(download_mode or DownloadMode.REUSE_DATASET_IF_EXISTS)
    verification_mode = VerificationMode(
        (verification_mode or VerificationMode.BASIC_CHECKS) if not save_infos else VerificationMode.ALL_CHECKS
    )

    # Create a dataset builder
    builder_instance = load_dataset_builder(
        path=path,
        name=name,
        data_dir=data_dir,
        data_files=data_files,
        cache_dir=cache_dir,
        features=features,
        download_config=download_config,
        download_mode=download_mode,
        revision=revision,
        token=token,
        storage_options=storage_options,
        trust_remote_code=trust_remote_code,
        _require_default_config_name=name is None,
        **config_kwargs,
    )

    # Return iterable dataset in case of streaming
    if streaming:
        return builder_instance.as_streaming_dataset(split=split)

    # Download and prepare data
    builder_instance.download_and_prepare(
        download_config=download_config,
        download_mode=download_mode,
        verification_mode=verification_mode,
        num_proc=num_proc,
        storage_options=storage_options,
    )

    # Build dataset for splits
    keep_in_memory = (
        keep_in_memory if keep_in_memory is not None else is_small_dataset(builder_instance.info.dataset_size)
    )
    ds = builder_instance.as_dataset(split=split, verification_mode=verification_mode, in_memory=keep_in_memory)
    if save_infos:
        builder_instance._save_infos()

    return ds


def load_from_disk(
    dataset_path: PathLike, keep_in_memory: Optional[bool] = None, storage_options: Optional[dict] = None
) -> Union[Dataset, DatasetDict]:
    """
    Loads a dataset that was previously saved using [`~Dataset.save_to_disk`] from a dataset directory, or
    from a filesystem using any implementation of `fsspec.spec.AbstractFileSystem`.

    Args:
        dataset_path (`path-like`):
            Path (e.g. `"dataset/train"`) or remote URI (e.g. `"s3://my-bucket/dataset/train"`)
            of the [`Dataset`] or [`DatasetDict`] directory where the dataset/dataset-dict will be
            loaded from.
        keep_in_memory (`bool`, defaults to `None`):
            Whether to copy the dataset in-memory. If `None`, the dataset
            will not be copied in-memory unless explicitly enabled by setting `datasets.config.IN_MEMORY_MAX_SIZE` to
            nonzero. See more details in the [improve performance](../cache#improve-performance) section.
        storage_options (`dict`, *optional*):
            Key/value pairs to be passed on to the file-system backend, if any.

            <Added version="2.9.0"/>

    Returns:
        [`Dataset`] or [`DatasetDict`]:
        - If `dataset_path` is a path of a dataset directory: the dataset requested.
        - If `dataset_path` is a path of a dataset dict directory, a [`DatasetDict`] with each split.

    Example:

    ```py
    >>> from datasets import load_from_disk
    >>> ds = load_from_disk('path/to/dataset/directory')
    ```
    """
    fs, *_ = url_to_fs(dataset_path, **(storage_options or {}))
    if not fs.exists(dataset_path):
        raise FileNotFoundError(f"Directory {dataset_path} not found")
    if fs.isfile(posixpath.join(dataset_path, config.DATASET_INFO_FILENAME)) and fs.isfile(
        posixpath.join(dataset_path, config.DATASET_STATE_JSON_FILENAME)
    ):
        return Dataset.load_from_disk(dataset_path, keep_in_memory=keep_in_memory, storage_options=storage_options)
    elif fs.isfile(posixpath.join(dataset_path, config.DATASETDICT_JSON_FILENAME)):
        return DatasetDict.load_from_disk(dataset_path, keep_in_memory=keep_in_memory, storage_options=storage_options)
    else:
        raise FileNotFoundError(
            f"Directory {dataset_path} is neither a `Dataset` directory nor a `DatasetDict` directory."
        )
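

# Usage sketch (assumption: run from a separate script with write access to tmp_dir,
# not part of the original module): round-tripping a small in-memory dataset through
# save_to_disk / load_from_disk.
def _sketch_round_trip(tmp_dir: str) -> Union[Dataset, DatasetDict]:
    Dataset.from_dict({"text": ["a", "b"]}).save_to_disk(tmp_dir)
    return load_from_disk(tmp_dir)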