parquet-converter committed
Commit 1d07dd7 • 1 Parent(s): 3a46cbf

Update parquet files

.gitattributes DELETED
@@ -1,38 +0,0 @@
- *.7z filter=lfs diff=lfs merge=lfs -text
- *.arrow filter=lfs diff=lfs merge=lfs -text
- *.bin filter=lfs diff=lfs merge=lfs -text
- *.bin.* filter=lfs diff=lfs merge=lfs -text
- *.bz2 filter=lfs diff=lfs merge=lfs -text
- *.ftz filter=lfs diff=lfs merge=lfs -text
- *.gz filter=lfs diff=lfs merge=lfs -text
- *.h5 filter=lfs diff=lfs merge=lfs -text
- *.joblib filter=lfs diff=lfs merge=lfs -text
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
- *.model filter=lfs diff=lfs merge=lfs -text
- *.msgpack filter=lfs diff=lfs merge=lfs -text
- *.onnx filter=lfs diff=lfs merge=lfs -text
- *.ot filter=lfs diff=lfs merge=lfs -text
- *.parquet filter=lfs diff=lfs merge=lfs -text
- *.pb filter=lfs diff=lfs merge=lfs -text
- *.pt filter=lfs diff=lfs merge=lfs -text
- *.pth filter=lfs diff=lfs merge=lfs -text
- *.rar filter=lfs diff=lfs merge=lfs -text
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
- *.tar.* filter=lfs diff=lfs merge=lfs -text
- *.tflite filter=lfs diff=lfs merge=lfs -text
- *.tgz filter=lfs diff=lfs merge=lfs -text
- *.wasm filter=lfs diff=lfs merge=lfs -text
- *.xz filter=lfs diff=lfs merge=lfs -text
- *.zip filter=lfs diff=lfs merge=lfs -text
- *.zstandard filter=lfs diff=lfs merge=lfs -text
- *tfevents* filter=lfs diff=lfs merge=lfs -text
- # Audio files - uncompressed
- *.pcm filter=lfs diff=lfs merge=lfs -text
- *.sam filter=lfs diff=lfs merge=lfs -text
- *.raw filter=lfs diff=lfs merge=lfs -text
- # Audio files - compressed
- *.aac filter=lfs diff=lfs merge=lfs -text
- *.flac filter=lfs diff=lfs merge=lfs -text
- *.mp3 filter=lfs diff=lfs merge=lfs -text
- *.ogg filter=lfs diff=lfs merge=lfs -text
- *.wav filter=lfs diff=lfs merge=lfs -text

README.md DELETED
@@ -1,3 +0,0 @@
- ---
- license: apache-2.0
- ---

train.pt → default/fer-2013-test.parquet RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:22b4a2cb7a0dad26ec86cf2f12c3c8f5aa4a5dca0e59400f07d583d0ee212d1e
- size 46253990
+ oid sha256:46feb05a10efa40d412982e28a5d554014962ea4cb6f2bfa5ace95f80805b6a9
+ size 10151939
test.pt → default/fer-2013-train.parquet RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ae0b8cac6df796dc0d263387aa0258b930bf5a31f069967d0f58899d2422233f
- size 11590857
+ oid sha256:4b8df59c436a33b34b75eb6301d2afd4f65fc018187a0b614388c10a1595eb97
+ size 40442331
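
For reference, a minimal sketch of reading one of the renamed Parquet shards directly. This is not part of the commit; it assumes the files above are reachable at the shown paths in this dataset repo and branch, and that the columns mirror the old script's features (img_bytes, labels).

# Sketch: download one converted shard and inspect it (assumed paths/branch).
import pandas as pd
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="Jeneral/fer-2013",
    repo_type="dataset",
    filename="default/fer-2013-test.parquet",
)
df = pd.read_parquet(path)
print(df.columns.tolist(), len(df))
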
fer-2013.py DELETED
@@ -1,66 +0,0 @@
- import pickle
- from pathlib import Path
- from typing import List
-
- import datasets
-
- logger = datasets.logging.get_logger(__name__)
-
-
- _HOMEPAGE = "https://www.kaggle.com/datasets/msambare/fer2013"
- _URL = "https://huggingface.co/datasets/Jeneral/fer-2013/resolve/main/"
- _URLS = {
-     "train": _URL + "train.pt",
-     "test": _URL + "test.pt",
- }
- _DESCRIPTION = "A large set of images of faces with seven emotional classes"
- _CITATION = """\
- @TECHREPORT{FER2013 dataset,
- author = {Prince Awuah Baffour},
- title = {Facial Emotion Detection},
- institution = {},
- year = {2022}
- }
- """
-
-
- class fer2013(datasets.GeneratorBasedBuilder):
-     def _info(self):
-         return datasets.DatasetInfo(
-             description=_DESCRIPTION,
-             features=datasets.Features(
-                 {
-                     "img_bytes": datasets.Value("binary"),
-                     "labels": datasets.features.ClassLabel(names=["angry", "disgust", "fear", "happy", "neutral", "sad", "surprise"]),
-                 }
-             ),
-             supervised_keys=("img_bytes", "labels"),
-             homepage=_HOMEPAGE,
-             citation=_CITATION,
-         )
-
-     def _split_generators(self, dl_manager: datasets.DownloadManager) -> List[datasets.SplitGenerator]:
-         downloaded_files = dl_manager.download_and_extract(_URLS)
-         return [
-             datasets.SplitGenerator(
-                 name=datasets.Split.TRAIN,
-                 gen_kwargs={"filepath": downloaded_files["train"]
-                 }
-             ),
-             datasets.SplitGenerator(
-                 name=datasets.Split.TEST,
-                 gen_kwargs={
-                     "filepath": downloaded_files["test"],
-                 },
-             ),
-         ]
-
-     def _generate_examples(self, filepath):
-         """This function returns the examples in the raw (text) form."""
-         logger.info("generating examples from = %s", filepath)
-
-         with Path(filepath).open("rb") as f:
-             examples = pickle.load(f)
-
-         for i, ex in enumerate(examples):
-             yield str(i), ex
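
With the loading script deleted and the splits stored as Parquet, the dataset would normally be loaded straight from the Parquet files. A minimal sketch, not part of this commit, assuming load_dataset resolves the default/fer-2013-*.parquet files for this repo and that the column names carry over from the old script:

# Sketch: load the converted Parquet splits instead of the deleted script.
from datasets import load_dataset

ds = load_dataset("Jeneral/fer-2013")  # splits inferred from the Parquet file names
print(ds)
print(ds["train"].features)  # expected to mirror the old features: img_bytes, labels
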