animelover committed on
Commit 0f1836a
1 Parent(s): c0e9d07

add crawling scripts

scripts/crawling_danbooru_by_id.ipynb ADDED
@@ -0,0 +1,188 @@
+ {
+ "metadata": {
+ "kernelspec": {
+ "language": "python",
+ "display_name": "Python 3",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.7.12",
+ "mimetype": "text/x-python",
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "pygments_lexer": "ipython3",
+ "nbconvert_exporter": "python",
+ "file_extension": ".py"
+ }
+ },
+ "nbformat_minor": 4,
+ "nbformat": 4,
+ "cells": [
+ {
+ "cell_type": "code",
+ "source": [
+ "import json\n",
+ "import cv2\n",
+ "import requests\n",
+ "import numpy as np\n",
+ "import traceback\n",
+ "import time\n",
+ "from concurrent.futures import ThreadPoolExecutor, wait\n",
+ "from tqdm.notebook import tqdm\n",
+ "from pathlib import Path\n",
+ "\n",
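+ "# browser-style UA for the Danbooru API; pixiv's image host rejects requests without a pixiv referer\n",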
+ "headers = {\n",
+ "    \"user-agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36\",\n",
+ "}\n",
+ "headers_pixiv = {\n",
+ "    \"user-agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36\",\n",
+ "    'referer': 'https://www.pixiv.net/'\n",
+ "}\n",
+ "banned_tags = ['furry', \"realistic\", \"3d\", \"1940s_(style)\",\"1950s_(style)\",\"1960s_(style)\",\"1970s_(style)\",\"1980s_(style)\",\"1990s_(style)\",\"retro_artstyle\",\"screentones\",\"pixel_art\",\"magazine_scan\",\"scan\"]\n",
+ "bad_tags = [\"absurdres\",\"jpeg_artifacts\", \"highres\", \"translation_request\", \"translated\", \"commentary\", \"commentary_request\", \"commentary_typo\", \"character_request\", \"bad_id\", \"bad_link\", \"bad_pixiv_id\", \"bad_twitter_id\", \"bad_tumblr_id\", \"bad_deviantart_id\", \"bad_nicoseiga_id\", \"md5_mismatch\", \"cosplay_request\", \"artist_request\", \"wide_image\", \"author_request\", \"artist_name\"]\n",
+ "\n",
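+ "# posts carrying any banned tag are skipped entirely; bad tags are only stripped from the caption\n",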
+ "def save_img(img_id, img, tags):\n",
+ "    output_dir = Path(\"imgs\")\n",
+ "    output_dir.mkdir(exist_ok=True)\n",
+ "    img_path = output_dir / f'{img_id}.jpg'\n",
+ "    cv2.imwrite(str(img_path), cv2.cvtColor((img * 255).astype(\"uint8\"), cv2.COLOR_RGB2BGR))\n",
+ "    with open(output_dir / f'{img_id}.txt', \"w\") as f:\n",
+ "        f.write(\", \".join(tags).replace(\"_\", \" \").strip())\n",
+ "\n",
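+ "# resize so the short side equals output_size, preserving aspect ratio\n",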
+ "def rescale(image, output_size):\n",
+ "    h, w = image.shape[:2]\n",
+ "    r = max(output_size / h, output_size / w)\n",
+ "    new_h, new_w = int(h * r), int(w * r)\n",
+ "    return cv2.resize(image, (new_w, new_h))\n",
+ "\n",
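+ "# fetch one post: metadata first, then the image; returns (img, tags) or None on any filter or failure\n",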
+ "def getImage(img_id, retry=0):\n",
+ "    def retry_fun(msg):\n",
+ "        if retry < 3:\n",
+ "            time.sleep(3)\n",
+ "            print(f\"{img_id} {msg}, retry\")\n",
+ "            return getImage(img_id, retry + 1)\n",
+ "        else:\n",
+ "            return None\n",
+ "    # fetch the post metadata from the Danbooru JSON API\n",
+ "    url = f'https://danbooru.donmai.us/posts/{img_id}.json'\n",
+ "    try:\n",
+ "        res = requests.get(url=url, headers=headers, timeout=20)\n",
+ "        if res.status_code == 404:\n",
+ "            print(f\"{img_id} get image failed\")\n",
+ "            return None\n",
+ "        success = res.status_code == 200\n",
+ "    except requests.exceptions.RequestException:\n",
+ "        success = False\n",
+ "    if not success:\n",
+ "        return retry_fun(\"get image failed\")\n",
+ "\n",
+ "    res = json.loads(res.text)\n",
+ "    if res[\"file_ext\"] not in [\"jpg\", \"png\"]:\n",
+ "        return None\n",
+ "    # prefer the direct file_url; fall back to the pixiv source if the file is hidden\n",
+ "    img_url = None\n",
+ "    if 'file_url' in res:\n",
+ "        img_url = res[\"file_url\"]\n",
+ "    elif 'source' in res and 'i.pximg.net' in res['source']:\n",
+ "        img_url = res['source']\n",
+ "    if img_url is None:\n",
+ "        return None\n",
+ "    # strip housekeeping tags; reject the post outright if any banned tag is present\n",
+ "    tags = res[\"tag_string\"].split()\n",
+ "    tags = [tag for tag in tags if tag not in bad_tags]\n",
+ "    for tag in banned_tags:\n",
+ "        if tag in tags:\n",
+ "            return None\n",
+ "    try:\n",
+ "        img_res = requests.get(url=img_url, headers=headers_pixiv, timeout=20)\n",
+ "        if img_res.status_code == 404:\n",
+ "            print(f\"{img_id} download failed\")\n",
+ "            return None\n",
+ "        success = img_res.status_code == 200\n",
+ "    except requests.exceptions.RequestException:\n",
+ "        success = False\n",
+ "    if not success:\n",
+ "        return retry_fun(\"download failed\")\n",
+ "\n",
+ "    img = cv2.imdecode(np.frombuffer(img_res.content, np.uint8), cv2.IMREAD_UNCHANGED)\n",
+ "    if img is None:\n",
+ "        return retry_fun(\"image decode failed\")\n",
+ "    # normalize to float32 in [0, 1]; drop images that are too small or too large\n",
+ "    img = img.astype(np.float32) / np.iinfo(img.dtype).max\n",
+ "    if min(img.shape[:2]) < 400:\n",
+ "        return None\n",
+ "    if img.shape[0] * img.shape[1] > 25000000:\n",
+ "        return None\n",
+ "    # composite RGBA over white, expand grayscale to 3 channels, cap the short side at 768\n",
+ "    if img.shape[-1] == 4:\n",
+ "        alpha = img[:, :, -1][:, :, np.newaxis]\n",
+ "        img = (1 - alpha) * 1 + alpha * img[:, :, :-1]\n",
+ "    if len(img.shape) < 3 or img.shape[-1] == 1:\n",
+ "        img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)\n",
+ "    if min(img.shape[:2]) > 768:\n",
+ "        img = rescale(img, 768)\n",
+ "    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n",
+ "    return img, tags\n",
+ "\n",
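+ "# crawl a contiguous id range, newest first, with a small thread pool\n",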
+ "def download_all(start_id, end_id, worker_num=4):\n",
+ "    image_list = list(reversed(range(end_id, start_id)))\n",
+ "    progress = tqdm(total=len(image_list))\n",
+ "    def work_fn(iid):\n",
+ "        try:\n",
+ "            img_tags = getImage(iid)\n",
+ "            if img_tags is not None:\n",
+ "                save_img(iid, img_tags[0], img_tags[1])\n",
+ "            progress.update(1)\n",
+ "        except Exception:\n",
+ "            traceback.print_exc()\n",
+ "    pool = ThreadPoolExecutor(max_workers=worker_num)\n",
+ "    all_task = [pool.submit(work_fn, iid) for iid in image_list]\n",
+ "    wait(all_task)\n",
+ "    pool.shutdown()"
+ ],
+ "metadata": {
+ "_uuid": "8f2839f25d086af736a60e9eeb907d3b93b6e0e5",
+ "_cell_guid": "b1076dfc-b9ad-4769-8c92-a6c4dae69d19",
+ "execution": {
+ "iopub.status.busy": "2023-02-14T11:44:24.070496Z",
+ "iopub.execute_input": "2023-02-14T11:44:24.071422Z",
+ "iopub.status.idle": "2023-02-14T11:44:24.430445Z",
+ "shell.execute_reply.started": "2023-02-14T11:44:24.071315Z",
+ "shell.execute_reply": "2023-02-14T11:44:24.429283Z"
+ },
+ "trusted": true,
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "execution_count": 1,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "outputs": [],
+ "source": [
+ "download_all(6019085, 6019085 - 50000, 8)"
+ ],
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ }
+ }
+ ]
+ }
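
Note: before launching a long download_all run, it can help to sanity-check one post by hand. A minimal sketch (the probe helper below is hypothetical and not part of the committed script; it only reuses the notebook's endpoint and a browser-style user-agent):

    import requests

    headers = {"user-agent": "Mozilla/5.0"}

    def probe(img_id):
        # fetch one post's metadata and show the fields the crawler relies on
        res = requests.get(f"https://danbooru.donmai.us/posts/{img_id}.json",
                           headers=headers, timeout=20)
        res.raise_for_status()
        post = res.json()
        print("file_ext:", post.get("file_ext"))
        print("file_url:", post.get("file_url", "<hidden>"))
        print("tags:", post.get("tag_string", "")[:120])

    probe(6019085)
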
scripts/crawling_danbooru_by_tags.ipynb ADDED
@@ -0,0 +1,227 @@
+ {
+ "metadata": {
+ "kernelspec": {
+ "language": "python",
+ "display_name": "Python 3",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.7.12",
+ "mimetype": "text/x-python",
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "pygments_lexer": "ipython3",
+ "nbconvert_exporter": "python",
+ "file_extension": ".py"
+ }
+ },
+ "nbformat_minor": 4,
+ "nbformat": 4,
+ "cells": [
+ {
+ "cell_type": "code",
+ "source": [
+ "import json\n",
+ "import cv2\n",
+ "import re\n",
+ "import requests\n",
+ "import numpy as np\n",
+ "import traceback\n",
+ "import time\n",
+ "from concurrent.futures import ThreadPoolExecutor, wait\n",
+ "from tqdm.notebook import tqdm\n",
+ "from pathlib import Path\n",
+ "\n",
+ "headers = {\n",
+ "    \"user-agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36\",\n",
+ "}\n",
+ "headers_pixiv = {\n",
+ "    \"user-agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36\",\n",
+ "    'referer': 'https://www.pixiv.net/'\n",
+ "}\n",
+ "banned_tags = ['furry', 'lineart', 'dark_skin', 'colored_skin', 'blue_skin','green_skin', 'purple_skin', 'grey_skin', 'black_skin',\n",
+ "    'pink_skin', 'red_skin', 'condom_in_mouth', 'monster_girl', \"mouth_mask\",\"multiple_views\", \"muscular\",\n",
+ "    \"muscular_female\",\"pinup_(style)\", \"braces\", \"freckles\", \"glasgow_smile\", \"chibi\", \"animal_head\", \"robot\",\n",
+ "    \"humanoid_robot\", \"mecha\", \"realistic\", \"3d\", \"panels\", \"everyone\", \"sample_watermark\", \"text_focus\",\n",
+ "    \"text-only_page\", \"tagme\",\"android\",\"photorealistic\",\"portrait\",\n",
+ "    \"1970s_(style)\",\"1980s_(style)\",\"1990s_(style)\",\"retro_artstyle\",\"science_fiction\",\"still_life\",\n",
+ "    \"food_focus\",\"vehicle_focus\", \"battle\",\"non-web_source\",\"blender_(medium)\",\"mikumikudance_(medium)\",\n",
+ "    \"traditional_media\",\"sketch\",\"lineart\",\"monochrome\",\"cover\",\"vaporwave\",\"screentones\",\"flat_color\",\"no_lineart\",\n",
+ "    \"limited_palette\",\"unfinished\",\"poster\",\"poster_(medium)\",\"promotional_art\",\"4koma\",\"multiple_4koma\",\"pixel_art\",\n",
+ "    \"magazine_scan\",\"scan\",\"anime_screencap\",\"graphite_(medium)\",\"silhouette\",\"watercolor_(medium)\",\"ukiyo-e\",\"partially_colored\",\n",
+ "    \"cyberpunk\",\"tarot\",\"steampunk\",\"pokemon_(creature)\",\"marnie_(pokemon)\",\"jpeg_artifacts\",\"film_grain\",\n",
+ "    \"concept_art\",\"painting_(medium)\",\"marker_(medium)\",\"colored_pencil_(medium)\",\n",
+ "    \"photo_(medium)\",\"letterboxed\",\"multiple_views\",\"6+girls\",\"6+boys\",\"5girls\",\"5boys\"]\n",
+ "bad_tags = [\"absurdres\", \"highres\", \"translation_request\", \"translated\", \"commentary\", \"commentary_request\", \"commentary_typo\", \"character_request\", \"bad_id\", \"bad_link\", \"bad_pixiv_id\", \"bad_twitter_id\", \"bad_tumblr_id\", \"bad_deviantart_id\", \"bad_nicoseiga_id\", \"md5_mismatch\", \"cosplay_request\", \"artist_request\", \"wide_image\", \"author_request\", \"artist_name\"]\n",
+ "\n",
+ "def save_img(img_id, img, tags):\n",
+ "    output_dir = Path(\"imgs\")\n",
+ "    output_dir.mkdir(exist_ok=True)\n",
+ "    img_path = output_dir / f'{img_id}.jpg'\n",
+ "    cv2.imwrite(str(img_path), cv2.cvtColor((img * 255).astype(\"uint8\"), cv2.COLOR_RGB2BGR))\n",
+ "    with open(output_dir / f'{img_id}.txt', \"w\") as f:\n",
+ "        f.write(\", \".join(tags).replace(\"_\", \" \").strip())\n",
+ "\n",
+ "def rescale(image, output_size):\n",
+ "    h, w = image.shape[:2]\n",
+ "    r = max(output_size / h, output_size / w)\n",
+ "    new_h, new_w = int(h * r), int(w * r)\n",
+ "    return cv2.resize(image, (new_w, new_h))\n",
+ "\n",
+ "def getImage(img_id, retry=0):\n",
+ "    def retry_fun(msg):\n",
+ "        if retry < 3:\n",
+ "            time.sleep(3)\n",
+ "            print(f\"{img_id} {msg}, retry\")\n",
+ "            return getImage(img_id, retry + 1)\n",
+ "        else:\n",
+ "            return None\n",
+ "    # fetch the post metadata from the Danbooru JSON API\n",
+ "    url = f'https://danbooru.donmai.us/posts/{img_id}.json'\n",
+ "    try:\n",
+ "        res = requests.get(url=url, headers=headers, timeout=20)\n",
+ "        if res.status_code == 404:\n",
+ "            print(f\"{img_id} get image failed\")\n",
+ "            return None\n",
+ "        success = res.status_code == 200\n",
+ "    except requests.exceptions.RequestException:\n",
+ "        success = False\n",
+ "    if not success:\n",
+ "        return retry_fun(\"get image failed\")\n",
+ "\n",
+ "    res = json.loads(res.text)\n",
+ "    if res[\"file_ext\"] not in [\"jpg\", \"png\"]:\n",
+ "        return None\n",
+ "    # prefer the direct file_url; fall back to the pixiv source if the file is hidden\n",
+ "    img_url = None\n",
+ "    if 'file_url' in res:\n",
+ "        img_url = res[\"file_url\"]\n",
+ "    elif 'source' in res and 'i.pximg.net' in res['source']:\n",
+ "        img_url = res['source']\n",
+ "    if img_url is None:\n",
+ "        return None\n",
+ "    # strip housekeeping tags; reject the post outright if any banned tag is present\n",
+ "    tags = res[\"tag_string\"].split()\n",
+ "    tags = [tag for tag in tags if tag not in bad_tags]\n",
+ "    for tag in banned_tags:\n",
+ "        if tag in tags:\n",
+ "            return None\n",
+ "    try:\n",
+ "        img_res = requests.get(url=img_url, headers=headers_pixiv, timeout=20)\n",
+ "        if img_res.status_code == 404:\n",
+ "            print(f\"{img_id} download failed\")\n",
+ "            return None\n",
+ "        success = img_res.status_code == 200\n",
+ "    except requests.exceptions.RequestException:\n",
+ "        success = False\n",
+ "    if not success:\n",
+ "        return retry_fun(\"download failed\")\n",
+ "\n",
+ "    img = cv2.imdecode(np.frombuffer(img_res.content, np.uint8), cv2.IMREAD_UNCHANGED)\n",
+ "    if img is None:\n",
+ "        return retry_fun(\"image decode failed\")\n",
+ "    # normalize to float32 in [0, 1]; drop images that are too small or too large\n",
+ "    img = img.astype(np.float32) / np.iinfo(img.dtype).max\n",
+ "    if min(img.shape[:2]) < 400:\n",
+ "        return None\n",
+ "    if img.shape[0] * img.shape[1] > 25000000:\n",
+ "        return None\n",
+ "    # composite RGBA over white, expand grayscale to 3 channels, cap the short side at 768\n",
+ "    if img.shape[-1] == 4:\n",
+ "        alpha = img[:, :, -1][:, :, np.newaxis]\n",
+ "        img = (1 - alpha) * 1 + alpha * img[:, :, :-1]\n",
+ "    if len(img.shape) < 3 or img.shape[-1] == 1:\n",
+ "        img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)\n",
+ "    if min(img.shape[:2]) > 768:\n",
+ "        img = rescale(img, 768)\n",
+ "    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n",
+ "    return img, tags\n",
+ "\n",
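+ "# iterate post ids matching a tag query by following Danbooru's show_seq redirect\n",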
+ "def spider(start_id, kwd, max_num=1000):\n",
+ "    count = 0\n",
+ "    retry_count = 0\n",
+ "    re_next = False\n",
+ "    while count < max_num:\n",
+ "        if retry_count > 3:\n",
+ "            print(f'retry_count: {retry_count} > 3, stopped')\n",
+ "            break\n",
+ "        if not re_next:\n",
+ "            yield start_id\n",
+ "            count += 1\n",
+ "            retry_count = 0\n",
+ "        else:\n",
+ "            time.sleep(3)\n",
+ "            retry_count += 1\n",
+ "        re_next = False\n",
+ "        next_url = f'https://danbooru.donmai.us/posts/{start_id}/show_seq?q={kwd}&seq=next'\n",
+ "        try:\n",
+ "            res = requests.get(url=next_url, headers=headers, allow_redirects=False, timeout=20)\n",
+ "            if res.status_code != 302:\n",
+ "                re_next = True\n",
+ "                print(f'{next_url} status_code is {res.status_code}, retry')\n",
+ "                continue\n",
+ "        except requests.exceptions.RequestException:\n",
+ "            re_next = True\n",
+ "            print('connect error, retry')\n",
+ "            continue\n",
+ "        next_url = res.headers['Location']\n",
+ "        next_id = int(re.search(r'(?<=posts/)\\d+', next_url).group())\n",
+ "        if start_id == next_id:  # the id stops changing once the end of the results is reached\n",
+ "            break\n",
+ "        start_id = next_id\n",
+ "    print(f\"end, last id:{start_id}\")\n",
+ "\n",
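+ "# download up to max_num posts for a tag query, feeding ids from spider into a thread pool\n",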
+ "def download_all(start_id, kwd, max_num=1000, worker_num=4):\n",
+ "    progress = tqdm(total=max_num)\n",
+ "    def work_fn(iid):\n",
+ "        try:\n",
+ "            img_tags = getImage(iid)\n",
+ "            if img_tags is not None:\n",
+ "                save_img(iid, img_tags[0], img_tags[1])\n",
+ "            progress.update(1)\n",
+ "        except Exception:\n",
+ "            traceback.print_exc()\n",
+ "    pool = ThreadPoolExecutor(max_workers=worker_num)\n",
+ "    all_task = [pool.submit(work_fn, iid) for iid in tqdm(spider(start_id, kwd, max_num), total=max_num)]\n",
+ "    wait(all_task)\n",
+ "    pool.shutdown()"
+ ],
+ "metadata": {
+ "_uuid": "8f2839f25d086af736a60e9eeb907d3b93b6e0e5",
+ "_cell_guid": "b1076dfc-b9ad-4769-8c92-a6c4dae69d19",
+ "execution": {
+ "iopub.status.busy": "2023-02-14T12:05:54.846712Z",
+ "iopub.execute_input": "2023-02-14T12:05:54.847715Z",
+ "iopub.status.idle": "2023-02-14T12:05:54.885022Z",
+ "shell.execute_reply.started": "2023-02-14T12:05:54.847668Z",
+ "shell.execute_reply": "2023-02-14T12:05:54.883707Z"
+ },
+ "trusted": true,
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "execution_count": 2,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# search the tag on danbooru first to get start_id\n",
+ "download_all(6060932, \"princess_connect! 1girl\", 50000, 8)"
+ ],
+ "metadata": {
+ "trusted": true,
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "execution_count": null,
+ "outputs": []
+ }
+ ]
+ }
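
Note: the tag notebook paginates with Danbooru's show_seq redirect instead of page numbers, which is why start_id has to be found by searching the tag on the site first. A minimal sketch of that walk that only collects ids (the walk_ids helper is hypothetical; it reuses spider's endpoint and redirect handling, and the tag and count are purely illustrative):

    import re
    import requests

    headers = {"user-agent": "Mozilla/5.0"}

    def walk_ids(start_id, kwd, n=5):
        # follow the show_seq redirect up to n times, yielding post ids
        for _ in range(n):
            yield start_id
            res = requests.get(
                f"https://danbooru.donmai.us/posts/{start_id}/show_seq",
                params={"q": kwd, "seq": "next"},
                headers=headers, allow_redirects=False, timeout=20)
            if res.status_code != 302:
                break  # no redirect: transient error or nothing next
            next_id = int(re.search(r"(?<=posts/)\d+", res.headers["Location"]).group())
            if next_id == start_id:  # the id stops changing at the end of the results
                break
            start_id = next_id

    print(list(walk_ids(6060932, "princess_connect! 1girl")))
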