From 5e7eab6e5791d733e0388983c281d991e570d301 Mon Sep 17 00:00:00 2001
From: Prince kumar <85225054+prince0310@users.noreply.github.com>
Date: Sun, 12 Feb 2023 21:40:56 +0530
Subject: [PATCH 1/3] Add files via upload
Run this notebook to create annotations in YOLO format
---
create annotation.ipynb | 565 ++++++++++++++++++++++++++++++++++++++++
1 file changed, 565 insertions(+)
create mode 100644 create annotation.ipynb
diff --git a/create annotation.ipynb b/create annotation.ipynb
new file mode 100644
index 0000000..4e7c94b
--- /dev/null
+++ b/create annotation.ipynb
@@ -0,0 +1,565 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import pandas as pd\n",
+ "import os"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "Index(['id', 'classes'], dtype='object')"
+ ]
+ },
+ "execution_count": 2,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "classes_data = pd.read_csv('/home/ryzenrtx/CBCT_YOLO/men_women/class-descriptions-boxable.csv')\n",
+ "classes_data.columns"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 30,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'Woman': '/m/03bt1vf', 'Man': '/m/04yx4'}"
+ ]
+ },
+ "execution_count": 30,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "class_list = dict()\n",
+ "for id, clas in zip(classes_data['id'],classes_data['classes']):\n",
+ " if clas =='Woman' or clas == \"Man\": # Get the id of the class you want to annotate and add it as or condition\n",
+ " class_list[clas] = id\n",
+ "class_list"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 32,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "['Woman', 'Man']"
+ ]
+ },
+ "execution_count": 32,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "classes = list(class_list.keys())\n",
+ "classes"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 35,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "['/m/03bt1vf', '/m/04yx4']"
+ ]
+ },
+ "execution_count": 35,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "class_string = [class_list[key] for key in class_list]\n",
+ "class_string"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 30,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " ImageID | \n",
+ " LabelName | \n",
+ " XMin | \n",
+ " XMax | \n",
+ " YMin | \n",
+ " YMax | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 0 | \n",
+ " 000002b66c9c498e | \n",
+ " /m/01g317 | \n",
+ " 0.012500 | \n",
+ " 0.195312 | \n",
+ " 0.148438 | \n",
+ " 0.587500 | \n",
+ "
\n",
+ " \n",
+ " 1 | \n",
+ " 000002b66c9c498e | \n",
+ " /m/01g317 | \n",
+ " 0.025000 | \n",
+ " 0.276563 | \n",
+ " 0.714063 | \n",
+ " 0.948438 | \n",
+ "
\n",
+ " \n",
+ " 2 | \n",
+ " 000002b66c9c498e | \n",
+ " /m/01g317 | \n",
+ " 0.151562 | \n",
+ " 0.310937 | \n",
+ " 0.198437 | \n",
+ " 0.590625 | \n",
+ "
\n",
+ " \n",
+ " 3 | \n",
+ " 000002b66c9c498e | \n",
+ " /m/01g317 | \n",
+ " 0.256250 | \n",
+ " 0.429688 | \n",
+ " 0.651563 | \n",
+ " 0.925000 | \n",
+ "
\n",
+ " \n",
+ " 4 | \n",
+ " 000002b66c9c498e | \n",
+ " /m/01g317 | \n",
+ " 0.257812 | \n",
+ " 0.346875 | \n",
+ " 0.235938 | \n",
+ " 0.385938 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " ImageID LabelName XMin XMax YMin YMax\n",
+ "0 000002b66c9c498e /m/01g317 0.012500 0.195312 0.148438 0.587500\n",
+ "1 000002b66c9c498e /m/01g317 0.025000 0.276563 0.714063 0.948438\n",
+ "2 000002b66c9c498e /m/01g317 0.151562 0.310937 0.198437 0.590625\n",
+ "3 000002b66c9c498e /m/01g317 0.256250 0.429688 0.651563 0.925000\n",
+ "4 000002b66c9c498e /m/01g317 0.257812 0.346875 0.235938 0.385938"
+ ]
+ },
+ "execution_count": 30,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "annotation_data = pd.read_csv('/home/ryzenrtx/CBCT_YOLO/men_women/train-annotations-bbox.csv', usecols=['ImageID', 'LabelName', 'XMin', 'XMax', 'YMin', 'YMax']) # Path of annotation file\n",
+ "annotation_data.head()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 31,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " ImageID | \n",
+ " LabelName | \n",
+ " XMin | \n",
+ " XMax | \n",
+ " YMin | \n",
+ " YMax | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 24 | \n",
+ " 000002c707c9895e | \n",
+ " /m/03bt1vf | \n",
+ " 0.053085 | \n",
+ " 0.464849 | \n",
+ " 0.319846 | \n",
+ " 0.839114 | \n",
+ "
\n",
+ " \n",
+ " 50 | \n",
+ " 000004f4400f6ec5 | \n",
+ " /m/04yx4 | \n",
+ " 0.138125 | \n",
+ " 0.621250 | \n",
+ " 0.217554 | \n",
+ " 0.999066 | \n",
+ "
\n",
+ " \n",
+ " 51 | \n",
+ " 000004f4400f6ec5 | \n",
+ " /m/04yx4 | \n",
+ " 0.404375 | \n",
+ " 0.788750 | \n",
+ " 0.171802 | \n",
+ " 0.999066 | \n",
+ "
\n",
+ " \n",
+ " 52 | \n",
+ " 000004f4400f6ec5 | \n",
+ " /m/04yx4 | \n",
+ " 0.615000 | \n",
+ " 0.999375 | \n",
+ " 0.656396 | \n",
+ " 0.999066 | \n",
+ "
\n",
+ " \n",
+ " 84 | \n",
+ " 0000201cd362f303 | \n",
+ " /m/03bt1vf | \n",
+ " 0.250000 | \n",
+ " 0.395000 | \n",
+ " 0.236398 | \n",
+ " 0.729831 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " ImageID LabelName XMin XMax YMin YMax\n",
+ "24 000002c707c9895e /m/03bt1vf 0.053085 0.464849 0.319846 0.839114\n",
+ "50 000004f4400f6ec5 /m/04yx4 0.138125 0.621250 0.217554 0.999066\n",
+ "51 000004f4400f6ec5 /m/04yx4 0.404375 0.788750 0.171802 0.999066\n",
+ "52 000004f4400f6ec5 /m/04yx4 0.615000 0.999375 0.656396 0.999066\n",
+ "84 0000201cd362f303 /m/03bt1vf 0.250000 0.395000 0.236398 0.729831"
+ ]
+ },
+ "execution_count": 31,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "filtered_class_data = annotation_data.loc[annotation_data['LabelName'].isin(class_string)].copy()\n",
+ "filtered_class_data.head()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 32,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "filtered_class_data['ClassNumber'] = ''\n",
+ "filtered_class_data['center x'] = ''\n",
+ "filtered_class_data['center y'] = ''\n",
+ "filtered_class_data['width'] = ''\n",
+ "filtered_class_data['height'] = ''"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 34,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "(2185931, 6)"
+ ]
+ },
+ "execution_count": 34,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "for i in range(len(class_string)):\n",
+ " filtered_class_data.loc[filtered_class_data['LabelName'] == class_string[i], 'ClassNumber'] = i\n",
+ "\n",
+ "filtered_class_data['center x'] = (filtered_class_data['XMax'] + filtered_class_data['XMin'])/2\n",
+ "filtered_class_data['center y'] = (filtered_class_data['YMax'] + filtered_class_data['YMin'])/2\n",
+ "\n",
+ "filtered_class_data['width'] = filtered_class_data['XMax'] - filtered_class_data['XMin']\n",
+ "filtered_class_data['height'] = filtered_class_data['YMax'] - filtered_class_data['YMin']\n",
+ "\n",
+ "YOLO_values = filtered_class_data.loc[:, ['ImageID', 'ClassNumber', 'center x', 'center y', 'width', 'height']].copy()\n",
+ "YOLO_values.shape"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 39,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " ImageID | \n",
+ " ClassNumber | \n",
+ " center x | \n",
+ " center y | \n",
+ " width | \n",
+ " height | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 24 | \n",
+ " 000002c707c9895e | \n",
+ " 1 | \n",
+ " 0.258967 | \n",
+ " 0.579480 | \n",
+ " 0.411764 | \n",
+ " 0.519268 | \n",
+ "
\n",
+ " \n",
+ " 50 | \n",
+ " 000004f4400f6ec5 | \n",
+ " 0 | \n",
+ " 0.379687 | \n",
+ " 0.608310 | \n",
+ " 0.483125 | \n",
+ " 0.781512 | \n",
+ "
\n",
+ " \n",
+ " 51 | \n",
+ " 000004f4400f6ec5 | \n",
+ " 0 | \n",
+ " 0.596562 | \n",
+ " 0.585434 | \n",
+ " 0.384375 | \n",
+ " 0.827264 | \n",
+ "
\n",
+ " \n",
+ " 52 | \n",
+ " 000004f4400f6ec5 | \n",
+ " 0 | \n",
+ " 0.807187 | \n",
+ " 0.827731 | \n",
+ " 0.384375 | \n",
+ " 0.342670 | \n",
+ "
\n",
+ " \n",
+ " 84 | \n",
+ " 0000201cd362f303 | \n",
+ " 1 | \n",
+ " 0.322500 | \n",
+ " 0.483115 | \n",
+ " 0.145000 | \n",
+ " 0.493433 | \n",
+ "
\n",
+ " \n",
+ " ... | \n",
+ " ... | \n",
+ " ... | \n",
+ " ... | \n",
+ " ... | \n",
+ " ... | \n",
+ " ... | \n",
+ "
\n",
+ " \n",
+ " 14610164 | \n",
+ " ffffd9716fd38279 | \n",
+ " 0 | \n",
+ " 0.384062 | \n",
+ " 0.686667 | \n",
+ " 0.256875 | \n",
+ " 0.625000 | \n",
+ "
\n",
+ " \n",
+ " 14610169 | \n",
+ " ffffda81903d6bb7 | \n",
+ " 0 | \n",
+ " 0.056875 | \n",
+ " 0.787084 | \n",
+ " 0.113750 | \n",
+ " 0.424167 | \n",
+ "
\n",
+ " \n",
+ " 14610170 | \n",
+ " ffffda81903d6bb7 | \n",
+ " 0 | \n",
+ " 0.296250 | \n",
+ " 0.599167 | \n",
+ " 0.271250 | \n",
+ " 0.800000 | \n",
+ "
\n",
+ " \n",
+ " 14610171 | \n",
+ " ffffda81903d6bb7 | \n",
+ " 0 | \n",
+ " 0.905625 | \n",
+ " 0.390834 | \n",
+ " 0.187500 | \n",
+ " 0.203333 | \n",
+ "
\n",
+ " \n",
+ " 14610215 | \n",
+ " fffffdaec951185d | \n",
+ " 1 | \n",
+ " 0.746563 | \n",
+ " 0.525328 | \n",
+ " 0.359375 | \n",
+ " 0.921201 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
2185931 rows × 6 columns
\n",
+ "
"
+ ],
+ "text/plain": [
+ " ImageID ClassNumber center x center y width height\n",
+ "24 000002c707c9895e 1 0.258967 0.579480 0.411764 0.519268\n",
+ "50 000004f4400f6ec5 0 0.379687 0.608310 0.483125 0.781512\n",
+ "51 000004f4400f6ec5 0 0.596562 0.585434 0.384375 0.827264\n",
+ "52 000004f4400f6ec5 0 0.807187 0.827731 0.384375 0.342670\n",
+ "84 0000201cd362f303 1 0.322500 0.483115 0.145000 0.493433\n",
+ "... ... ... ... ... ... ...\n",
+ "14610164 ffffd9716fd38279 0 0.384062 0.686667 0.256875 0.625000\n",
+ "14610169 ffffda81903d6bb7 0 0.056875 0.787084 0.113750 0.424167\n",
+ "14610170 ffffda81903d6bb7 0 0.296250 0.599167 0.271250 0.800000\n",
+ "14610171 ffffda81903d6bb7 0 0.905625 0.390834 0.187500 0.203333\n",
+ "14610215 fffffdaec951185d 1 0.746563 0.525328 0.359375 0.921201\n",
+ "\n",
+ "[2185931 rows x 6 columns]"
+ ]
+ },
+ "execution_count": 39,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "YOLO_values"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "img_path = \"/home/ryzenrtx/CBCT_YOLO/OIDv4_ToolKit/OID/Dataset/validation/Woman\" # Path of images\n",
+ "labels_path = \"/home/ryzenrtx/CBCT_YOLO/OIDv4_ToolKit/OID/Dataset/validation/labels_women\" # Path of labels folder\n",
+ "os.chdir(img_path)\n",
+ "\n",
+ "for current_dir, dirs, files in os.walk('.'):\n",
+ " for f in files:\n",
+ " if f.endswith('.jpg'):\n",
+ " img_title = f[:-4]\n",
+ " print(img_title)\n",
+ " YOLO_files = YOLO_values.loc[YOLO_values['ImageID'] == img_title]\n",
+ " df = YOLO_files.loc[:, ['ClassNumber', 'center x', 'center y', 'width', 'height']].copy()\n",
+ "\n",
+ " save_path = labels_path + '/' + img_title + '.txt'\n",
+ "\n",
+ " df.to_csv(save_path, sep=' ', index=False, header=False)"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.8.10"
+ },
+ "orig_nbformat": 4,
+ "vscode": {
+ "interpreter": {
+ "hash": "916dbcbb3f70747c44a77c7bcd40155683ae19c65e1c03b4aa3499c5328201f1"
+ }
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
From b6326d6573124014367afd6b5b6ac6b4b777e7a5 Mon Sep 17 00:00:00 2001
From: Prince kumar <85225054+prince0310@users.noreply.github.com>
Date: Sun, 12 Feb 2023 21:43:27 +0530
Subject: [PATCH 2/3] Convert annotation into YOLO format
Convert annotations into YOLO format
---
create annotation.ipynb => convert annotation.ipynb | 0
1 file changed, 0 insertions(+), 0 deletions(-)
rename create annotation.ipynb => convert annotation.ipynb (100%)
diff --git a/create annotation.ipynb b/convert annotation.ipynb
similarity index 100%
rename from create annotation.ipynb
rename to convert annotation.ipynb
From 05e9dca971464cf779eb5a926ae8caf6deb2d623 Mon Sep 17 00:00:00 2001
From: Prince kumar <85225054+prince0310@users.noreply.github.com>
Date: Sun, 12 Feb 2023 21:57:21 +0530
Subject: [PATCH 3/3] Update README.md
---
README.md | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 3d1dfef..ffb06c1 100644
--- a/README.md
+++ b/README.md
@@ -233,7 +233,12 @@ Hence with `d` (next), `a` (previous) and `q` (exit) you will be able to explore
-# 5.0 Community Contributions
+# 5.0 Additionally: convert annotations into YOLO format
+- YOLO is gaining popularity due to its ease of implementation
+- but implementing it requires the data in a particular format
+- You can run the ``` convert annotation.ipynb ``` notebook to convert annotations into YOLO format
+
+# 6.0 Community Contributions
- [Denis Zuenko](https://github.com/zuenko) has added multithreading to the ToolKit and is currently working on the generalization and speeding up process of the labels creation
- [Skylion007](https://github.com/Skylion007) has improved labels creation reducing the runtime from O(nm) to O(n). That massively speeds up label generation
- [Alex March](https://github.com/hosaka) has added the limit option to the ToolKit in order to download only a maximum number of images of a certain class