---
dataset_info:
  features:
  - name: text
    dtype: string
    id: field
  - name: label
    list:
    - name: user_id
      dtype: string
      id: question
    - name: value
      dtype: string
      id: suggestion
    - name: status
      dtype: string
      id: question
  - name: label-suggestion
    dtype: string
    id: suggestion
  - name: label-suggestion-metadata
    struct:
    - name: type
      dtype: string
      id: suggestion-metadata
    - name: score
      dtype: float32
      id: suggestion-metadata
    - name: agent
      dtype: string
      id: suggestion-metadata
  - name: external_id
    dtype: string
    id: external_id
  - name: metadata
    dtype: string
    id: metadata
  - name: vectors
    struct:
    - name: sentence_embedding
      sequence: float32
      id: vectors
  splits:
  - name: train
    num_bytes: 1834637
    num_examples: 1000
  download_size: 2334692
  dataset_size: 1834637
configs:
- config_name: default
  data_files:
  - split: train
    path: data/train-*
---
# Dataset Card for "end2end_textclassification_with_vectors"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
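
The metadata above describes the default configuration: a single `train` split of 1,000 examples, where each record combines the raw text, a list of per-annotator label responses, an optional label suggestion with its metadata, and a precomputed sentence embedding under `vectors`. As a minimal sketch of how to work with this schema, assuming the dataset is hosted on the Hugging Face Hub (the repository namespace below is a placeholder, not the actual id), it can be loaded with the `datasets` library:

```python
from datasets import load_dataset

# Placeholder repository id: replace "<namespace>" with the actual
# organization or user hosting this dataset on the Hub.
dataset = load_dataset(
    "<namespace>/end2end_textclassification_with_vectors", split="train"
)

# The feature schema mirrors the dataset_info block in the card metadata.
print(dataset.features)

record = dataset[0]
print(record["text"])                     # the annotated text field
print(record["label"])                    # list of {user_id, value, status} responses
print(record["label-suggestion"])         # suggested label, if any
print(record["label-suggestion-metadata"])  # {type, score, agent}

# The vectors struct holds a float32 sequence per record.
embedding = record["vectors"]["sentence_embedding"]
print(len(embedding))                     # embedding dimensionality
```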