jacobbieker committed on
Commit 36fb866 · 1 Parent(s): 92844bc

Fill out TODOs

Files changed (1): eumetsat_uk_hrv.py (+17 −56)
eumetsat_uk_hrv.py CHANGED
@@ -1,5 +1,5 @@
 # coding=utf-8
-# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
+# Copyright 2020 The HuggingFace Datasets Authors and Open Climate Fix.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,14 +12,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# TODO: Address all TODOs and remove all explanatory comments
-"""TODO: Add a description here."""
+"""This dataset consists of HRV channel imagery from the EUMETSAT SEVIRI RSS service covering the UK from 2020-2021"""


 import xarray
-import json
-import os
-
 import datasets


@@ -34,43 +30,29 @@ year={2022}
 }
 """

-# TODO: Add description of the dataset here
-# You can copy an official description
 _DESCRIPTION = """\
-This new dataset is designed to solve this great NLP task and is crafted with a lot of care.
+The EUMETSAT Spinning Enhanced Visible and InfraRed Imager (SEVIRI) rapid scanning service (RSS) takes an image of the northern third of the Meteosat disc every five minutes (see the EUMETSAT website for more information on SEVIRI RSS). The original EUMETSAT dataset contains data from 2008 to the present day from 12 channels, and for a wide geographical extent covering North Africa, Saudi Arabia, all of Europe, and Western Russia. In contrast, this dataset on Google Cloud is a small subset of the entire SEVIRI RSS dataset: This Google Cloud dataset is from a single channel: the "high resolution visible" (HRV) channel; and contains data from January 2020 to November 2021. The geographical extent of this dataset on Google Cloud is a small subset of the total SEVIRI RSS extent: This Google Cloud dataset includes data over the United Kingdom and over North Western Europe.
+
+This dataset is slightly transformed: It does not contain the original numerical values.
+
+The original data is copyright EUMETSAT. EUMETSAT has given permission to redistribute this transformed data. The data was transformed by Open Climate Fix using satip.
+
+This public dataset is hosted in Google Cloud Storage and available free to use.
 """

-# TODO: Add a link to an official homepage for the dataset here
-_HOMEPAGE = ""
+_HOMEPAGE = "https://console.cloud.google.com/marketplace/product/bigquery-public-data/eumetsat-seviri-rss-hrv-uk?project=tactile-acrobat-249716"

-# TODO: Add the licence for the dataset here if you can find it
-_LICENSE = ""
+_LICENSE = "Cite EUMETSAT as the data source. This data is redistributed with permission from EUMETSAT under the terms of the EUMETSAT Data Policy for SEVIRI data with a latency of >3 hours. This redistributed dataset is released under the CC BY 4.0 open data license & is provided \"AS IS\" without any warranty, express or implied, from Google. Google disclaims all liability for any damages, direct or indirect, resulting from the use of the dataset."

-# TODO: Add link to the official dataset URLs here
-# The HuggingFace Datasets library doesn't host the datasets but only points to the original files.
-# This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
 _URLS = {
     "uk_hrv": "gs://public-datasets-eumetsat-solar-forecasting/satellite/EUMETSAT/SEVIRI_RSS/v3/eumetsat_seviri_hrv_uk.zarr",
 }


-# TODO: Name of the dataset usually match the script name with CamelCase instead of snake_case
 class EumetsatUkHrvDataset(datasets.GeneratorBasedBuilder):
-    """TODO: Short description of my dataset."""
+    """This dataset consists of the HRV channel from the EUMETSAT SEVIRI RSS service covering the UK from 2020 to 2021."""

     VERSION = datasets.Version("1.1.0")
-
-    # This is an example of a dataset with multiple configurations.
-    # If you don't want/need to define several sub-sets in your dataset,
-    # just remove the BUILDER_CONFIG_CLASS and the BUILDER_CONFIGS attributes.
-
-    # If you need to make complex sub-parts in the datasets with configurable options
-    # You can create your own builder configuration class to store attribute, inheriting from datasets.BuilderConfig
-    # BUILDER_CONFIG_CLASS = MyBuilderConfig
-
-    # You will be able to load one or the other configurations in the following list with
-    # data = datasets.load_dataset('my_dataset', 'first_domain')
-    # data = datasets.load_dataset('my_dataset', 'second_domain')
     BUILDER_CONFIGS = [
         datasets.BuilderConfig(name="uk", version=VERSION, description="This part of the dataset covers the UK"),
     ]
@@ -82,7 +64,6 @@ class EumetsatUkHrvDataset(datasets.GeneratorBasedBuilder):
         features = datasets.Features(
             {
                 "timestamp": datasets.Value("time64[ns]"),
-                "channel": datasets.Value("string"),
                 "image": datasets.Array3D(shape=(1, ), dtype="int16"),
                 "x_coordinates": datasets.Sequence(datasets.Value("float64")),
                 "y_coordinates": datasets.Sequence(datasets.Value("float64"))
@@ -105,21 +86,15 @@ class EumetsatUkHrvDataset(datasets.GeneratorBasedBuilder):
         )

     def _split_generators(self, dl_manager):
-        # TODO: This method is tasked with downloading/extracting the data and defining the splits depending on the configuration
-        # If several configurations are possible (listed in BUILDER_CONFIGS), the configuration selected by the user is in self.config.name
-
-        # dl_manager is a datasets.download.DownloadManager that can be used to download and extract URLS
-        # It can accept any type or nested list/dict and will give back the same structure with the url replaced with path to local files.
-        # By default the archives will be extracted and a path to a cached folder where they are extracted is returned instead of the archive
         urls = _URLS[self.config.name]
-        data_dir = dl_manager.download_and_extract(urls)
+        data_dir = dl_manager.download(urls)
         return [
             datasets.SplitGenerator(
                 name=datasets.Split.TRAIN,
                 # These kwargs will be passed to _generate_examples
                 gen_kwargs={
-                    "filepath": os.path.join(data_dir, "train.jsonl"),
-                    "time_range": None,
+                    "filepath": data_dir,
+                    "time_range": slice("2020-01-01", "2020-12-31"),
                     "split": "train",
                 },
             ),
@@ -127,34 +102,20 @@ class EumetsatUkHrvDataset(datasets.GeneratorBasedBuilder):
                 name=datasets.Split.TEST,
                 # These kwargs will be passed to _generate_examples
                 gen_kwargs={
-                    "filepath": os.path.join(data_dir, "test.jsonl"),
-                    "time_range": None, # TODO Decide on time range for train, test, val
+                    "filepath": data_dir,
+                    "time_range": slice("2021-01-01", "2021-12-31"),
                     "split": "test"
                 },
             ),
-            datasets.SplitGenerator(
-                name=datasets.Split.VALIDATION,
-                # These kwargs will be passed to _generate_examples
-                gen_kwargs={
-                    "filepath": os.path.join(data_dir, "dev.jsonl"),
-                    "time_range": None,
-                    "split": "dev",
-                },
-            ),
         ]

     # method parameters are unpacked from `gen_kwargs` as given in `_split_generators`
     def _generate_examples(self, filepath, time_range, split):
-        # TODO: This method handles input defined in _split_generators to yield (key, example) tuples from the dataset.
-        # The `key` is for legacy reasons (tfds) and is not important in itself, but must be unique for each example.
-        # TODO Here: Open Xarray, then iterate through the full images, returning them as np.ndarray
-        # TODO If streaming, then load from GCP directly
         sat_data = xarray.open_dataset(filepath, engine="zarr", chunks='auto')
         sat_data = sat_data.sel(time=time_range)
         for key, entry in enumerate(sat_data):
             yield key, {
                 "timestamp": entry.time.values,
-                "channel": entry["channels"].values,
                 "x_coordinates": entry.x_geospatial.values,
                 "y_coordinates": entry.y_geospatial.values,
                 "image": entry.values,