Commit 4d503ef (parent: 1a72533)
derek-thomas (HF staff) committed:

Updated with configurable sep and prefix, and in-order questions
Squad_V1_Question_Generation.ipynb CHANGED
@@ -39,7 +39,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 2,
+ "execution_count": 3,
  "metadata": {
  "colab": {
  "base_uri": "https://localhost:8080/"
@@ -52,47 +52,17 @@
  "name": "stdout",
  "output_type": "stream",
  "text": [
- "Requirement already satisfied: datasets in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (2.9.0)\n",
- "Requirement already satisfied: huggingface_hub in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (0.12.0)\n",
- "Requirement already satisfied: requests>=2.19.0 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from datasets) (2.28.2)\n",
- "Requirement already satisfied: fsspec[http]>=2021.11.1 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from datasets) (2023.1.0)\n",
- "Requirement already satisfied: pandas in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from datasets) (1.5.3)\n",
- "Requirement already satisfied: dill<0.3.7 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from datasets) (0.3.6)\n",
- "Requirement already satisfied: tqdm>=4.62.1 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from datasets) (4.64.1)\n",
- "Requirement already satisfied: xxhash in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from datasets) (3.2.0)\n",
- "Requirement already satisfied: aiohttp in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from datasets) (3.8.3)\n",
- "Requirement already satisfied: packaging in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from datasets) (23.0)\n",
- "Requirement already satisfied: responses<0.19 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from datasets) (0.18.0)\n",
- "Requirement already satisfied: multiprocess in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from datasets) (0.70.14)\n",
- "Requirement already satisfied: numpy>=1.17 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from datasets) (1.24.1)\n",
- "Requirement already satisfied: pyyaml>=5.1 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from datasets) (6.0)\n",
- "Requirement already satisfied: pyarrow>=6.0.0 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from datasets) (11.0.0)\n",
- "Requirement already satisfied: typing-extensions>=3.7.4.3 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from huggingface_hub) (4.4.0)\n",
- "Requirement already satisfied: filelock in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from huggingface_hub) (3.9.0)\n",
- "Requirement already satisfied: multidict<7.0,>=4.5 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from aiohttp->datasets) (6.0.4)\n",
- "Requirement already satisfied: frozenlist>=1.1.1 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from aiohttp->datasets) (1.3.3)\n",
- "Requirement already satisfied: yarl<2.0,>=1.0 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from aiohttp->datasets) (1.8.2)\n",
- "Requirement already satisfied: aiosignal>=1.1.2 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from aiohttp->datasets) (1.3.1)\n",
- "Requirement already satisfied: charset-normalizer<3.0,>=2.0 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from aiohttp->datasets) (2.1.1)\n",
- "Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from aiohttp->datasets) (4.0.2)\n",
- "Requirement already satisfied: attrs>=17.3.0 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from aiohttp->datasets) (22.2.0)\n",
- "Requirement already satisfied: idna<4,>=2.5 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from requests>=2.19.0->datasets) (3.4)\n",
- "Requirement already satisfied: certifi>=2017.4.17 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from requests>=2.19.0->datasets) (2022.12.7)\n",
- "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from requests>=2.19.0->datasets) (1.26.14)\n",
- "Requirement already satisfied: pytz>=2020.1 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from pandas->datasets) (2022.7.1)\n",
- "Requirement already satisfied: python-dateutil>=2.8.1 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from pandas->datasets) (2.8.2)\n",
- "Requirement already satisfied: six>=1.5 in /opt/homebrew/Caskroom/miniconda/base/envs/flan-t5-e2e-qg/lib/python3.10/site-packages (from python-dateutil>=2.8.1->pandas->datasets) (1.16.0)\n",
  "Note: you may need to restart the kernel to use updated packages.\n"
  ]
  }
  ],
  "source": [
- "%pip install datasets huggingface_hub"
+ "%pip install --quiet datasets huggingface_hub"
  ]
  },
  {
  "cell_type": "code",
- "execution_count": 3,
+ "execution_count": 4,
  "metadata": {
  "id": "otv5EWHoSJfB"
  },
@@ -115,7 +85,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 4,
+ "execution_count": 5,
  "metadata": {
  "colab": {
  "base_uri": "https://localhost:8080/",
@@ -172,6 +142,165 @@
  "> - You need to [download the file here](https://www.simoninithomas.com/hfdataset/squad_modified_for_t5_qg.zip), unzip it and upload it in the next cell."
  ]
  },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "57095c18c96f443797438d63995225ef",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "Downloading builder script: 0%| | 0.00/5.27k [00:00<?, ?B/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "e29c5b6b6b044841afaaf8eafb8fe97f",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "Downloading metadata: 0%| | 0.00/2.36k [00:00<?, ?B/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "055b42d0c8884fd29d24c34139b4bc26",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "Downloading readme: 0%| | 0.00/7.67k [00:00<?, ?B/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Downloading and preparing dataset squad/plain_text to /Users/derekthomas/.cache/huggingface/datasets/squad/plain_text/1.0.0/d6ec3ceb99ca480ce37cdd35555d6cb2511d223b9150cce08a837ef62ffea453...\n"
+ ]
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "9966d2b8b1d443eebf126a6b76764da4",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "Downloading data files: 0%| | 0/2 [00:00<?, ?it/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "9d97ede01c8d4f5d9cef4a9f8cd07768",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "Extracting data files: 0%| | 0/2 [00:00<?, ?it/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "Generating train split: 0%| | 0/87599 [00:00<?, ? examples/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "205353d5f2f94483ae0f20bbc069a919",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ "Generating validation split: 0%| | 0/10570 [00:00<?, ? examples/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Dataset squad downloaded and prepared to /Users/derekthomas/.cache/huggingface/datasets/squad/plain_text/1.0.0/d6ec3ceb99ca480ce37cdd35555d6cb2511d223b9150cce08a837ef62ffea453. Subsequent calls will reuse this data.\n"
+ ]
+ },
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "1c716c7a59bf40b9a836cb342e314457",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ " 0%| | 0/2 [00:00<?, ?it/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "raw_dataset = load_dataset('squad')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "Dataset({\n",
+ " features: ['id', 'title', 'context', 'question', 'answers'],\n",
+ " num_rows: 87599\n",
+ "})"
+ ]
+ },
+ "execution_count": 9,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "raw_dataset['train']"
+ ]
+ },
  {
  "cell_type": "code",
  "execution_count": 5,
@@ -680,7 +809,7 @@
  "name": "python",
  "nbconvert_exporter": "python",
  "pygments_lexer": "ipython3",
- "version": "3.10.9"
+ "version": "3.10.8"
  },
  "widgets": {
  "application/vnd.jupyter.widget-state+json": {
 
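The newly added notebook cells above pull down the raw SQuAD v1.1 splits and take a quick look at the train split. For readability, here is a minimal sketch of roughly what those cells do, outside of the notebook JSON (assuming `datasets` is installed as in the `%pip install --quiet datasets huggingface_hub` cell; the `[0]` lookup is only an illustrative way to peek at one row and is not part of the committed notebook):

```python
from datasets import load_dataset

# Download/prepare SQuAD v1.1 (plain_text config): 87,599 train and 10,570 validation rows.
raw_dataset = load_dataset("squad")

# The train split exposes: id, title, context, question, answers.
print(raw_dataset["train"])

# Illustrative peek at a single row.
example = raw_dataset["train"][0]
print(example["question"])
print(example["answers"])
```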
squad_modified_for_t5_qg.py CHANGED
@@ -67,6 +67,8 @@ class SquadConfig(datasets.BuilderConfig):
  class Squad(datasets.GeneratorBasedBuilder):
      """SQUAD: The Stanford Question Answering Dataset. Version 1.1."""

+     CONTEXT_PREFIX = 'gq: '
+     QUESTIONS_SEP = ' Question: '
      BUILDER_CONFIGS = [
          SquadConfig(
              name="plain_text",
@@ -102,6 +104,7 @@ class Squad(datasets.GeneratorBasedBuilder):
              datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={"filepath": downloaded_files["dev"]}),
          ]

+ 
      def _generate_examples(self, filepath):
          """This function returns the examples in the raw (text) form."""
          logger.info("generating examples from = %s", filepath)
@@ -110,10 +113,18 @@ class Squad(datasets.GeneratorBasedBuilder):
              squad = json.load(f)
              for article in squad["data"]:
                  for paragraph in article["paragraphs"]:
-                     source_text = f"generate questions: {paragraph['context'].strip()}"
-                     questions = [qas['question'].strip() for qas in paragraph['qas']]
-                     target_text = " {sep_token} ".join(questions)
-                     target_text = f"{target_text}"
+                     source_text = self.CONTEXT_PREFIX + paragraph['context'].strip()
+ 
+                     # Get questions in order
+                     qas = []
+                     for qa in paragraph['qas']:
+                         earliest_answer_start = min([answer['answer_start'] for answer in qa['answers']])
+                         question = qa['question'].strip()
+                         qas.append((earliest_answer_start, question))
+                     sorted_qas = sorted(qas, key=lambda x: x[0])
+                     only_qs = [qa[1] for qa in sorted_qas]
+                     target_text = self.QUESTIONS_SEP + self.QUESTIONS_SEP.join(only_qs)
+                     target_text = target_text.strip()
                      yield key, {
                          "context": source_text,
                          "questions": target_text}
 
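To make the new behaviour concrete: `_generate_examples` now prefixes each context with `CONTEXT_PREFIX` ('gq: '), sorts a paragraph's questions by the earliest `answer_start` of their answers so they follow the order in which the answers appear in the context, and joins them with `QUESTIONS_SEP` (' Question: '). Below is a self-contained sketch of that logic on a made-up SQuAD-style paragraph; the context, questions, and offsets are invented for illustration, and only the prefix, separator, and sorting mirror the script above.

```python
# Mirrors the prefix/separator and ordering logic from squad_modified_for_t5_qg.py.
CONTEXT_PREFIX = 'gq: '
QUESTIONS_SEP = ' Question: '

# Hypothetical SQuAD-style paragraph (not taken from the real dataset).
paragraph = {
    "context": "Paris is the capital of France. It hosted the 1900 Summer Olympics.",
    "qas": [
        {"question": "When did Paris host the Summer Olympics?",
         "answers": [{"answer_start": 46}]},  # "1900" appears later in the context
        {"question": "What is the capital of France?",
         "answers": [{"answer_start": 0}]},   # "Paris" appears first
    ],
}

source_text = CONTEXT_PREFIX + paragraph["context"].strip()

# Sort questions by where their earliest answer starts, so the target
# questions follow the order of the context.
qas = []
for qa in paragraph["qas"]:
    earliest_answer_start = min(answer["answer_start"] for answer in qa["answers"])
    qas.append((earliest_answer_start, qa["question"].strip()))
only_qs = [question for _, question in sorted(qas, key=lambda x: x[0])]
target_text = (QUESTIONS_SEP + QUESTIONS_SEP.join(only_qs)).strip()

print(source_text)
# gq: Paris is the capital of France. It hosted the 1900 Summer Olympics.
print(target_text)
# Question: What is the capital of France? Question: When did Paris host the Summer Olympics?
```

With the script in the working directory, the full pipeline can presumably be exercised with `load_dataset("./squad_modified_for_t5_qg.py")`, yielding examples whose `context` starts with `gq: ` and whose `questions` field is the answer-ordered list joined by `Question: `.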