// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
syntax = "proto3";

package google.cloud.datalabeling.v1beta1;

import "google/api/annotations.proto";
import "google/api/resource.proto";
import "google/cloud/datalabeling/v1beta1/annotation_spec_set.proto";
import "google/protobuf/duration.proto";

option csharp_namespace = "Google.Cloud.DataLabeling.V1Beta1";
option go_package = "google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabeling";
option java_multiple_files = true;
option java_package = "com.google.cloud.datalabeling.v1beta1";
option php_namespace = "Google\\Cloud\\DataLabeling\\V1beta1";
option ruby_package = "Google::Cloud::DataLabeling::V1beta1";
  27. // Specifies where the annotation comes from (whether it was provided by a
  28. // human labeler or a different source).
  29. enum AnnotationSource {
  30. ANNOTATION_SOURCE_UNSPECIFIED = 0;
  31. // Answer is provided by a human contributor.
  32. OPERATOR = 3;
  33. }
  34. // Annotation for Example. Each example may have one or more annotations. For
  35. // example in image classification problem, each image might have one or more
  36. // labels. We call labels binded with this image an Annotation.
  37. message Annotation {
  38. // Output only. Unique name of this annotation, format is:
  39. //
  40. // projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/{annotated_dataset}/examples/{example_id}/annotations/{annotation_id}
  41. string name = 1;
  42. // Output only. The source of the annotation.
  43. AnnotationSource annotation_source = 2;
  44. // Output only. This is the actual annotation value, e.g classification,
  45. // bounding box values are stored here.
  46. AnnotationValue annotation_value = 3;
  47. // Output only. Annotation metadata, including information like votes
  48. // for labels.
  49. AnnotationMetadata annotation_metadata = 4;
  50. // Output only. Sentiment for this annotation.
  51. AnnotationSentiment annotation_sentiment = 6;
  52. }
  53. enum AnnotationSentiment {
  54. ANNOTATION_SENTIMENT_UNSPECIFIED = 0;
  55. // This annotation describes negatively about the data.
  56. NEGATIVE = 1;
  57. // This label describes positively about the data.
  58. POSITIVE = 2;
  59. }
  60. enum AnnotationType {
  61. ANNOTATION_TYPE_UNSPECIFIED = 0;
  62. // Classification annotations in an image. Allowed for continuous evaluation.
  63. IMAGE_CLASSIFICATION_ANNOTATION = 1;
  64. // Bounding box annotations in an image. A form of image object detection.
  65. // Allowed for continuous evaluation.
  66. IMAGE_BOUNDING_BOX_ANNOTATION = 2;
  67. // Oriented bounding box. The box does not have to be parallel to horizontal
  68. // line.
  69. IMAGE_ORIENTED_BOUNDING_BOX_ANNOTATION = 13;
  70. // Bounding poly annotations in an image.
  71. IMAGE_BOUNDING_POLY_ANNOTATION = 10;
  72. // Polyline annotations in an image.
  73. IMAGE_POLYLINE_ANNOTATION = 11;
  74. // Segmentation annotations in an image.
  75. IMAGE_SEGMENTATION_ANNOTATION = 12;
  76. // Classification annotations in video shots.
  77. VIDEO_SHOTS_CLASSIFICATION_ANNOTATION = 3;
  78. // Video object tracking annotation.
  79. VIDEO_OBJECT_TRACKING_ANNOTATION = 4;
  80. // Video object detection annotation.
  81. VIDEO_OBJECT_DETECTION_ANNOTATION = 5;
  82. // Video event annotation.
  83. VIDEO_EVENT_ANNOTATION = 6;
  84. // Classification for text. Allowed for continuous evaluation.
  85. TEXT_CLASSIFICATION_ANNOTATION = 8;
  86. // Entity extraction for text.
  87. TEXT_ENTITY_EXTRACTION_ANNOTATION = 9;
  88. // General classification. Allowed for continuous evaluation.
  89. GENERAL_CLASSIFICATION_ANNOTATION = 14;
  90. }
  91. // Annotation value for an example.
  92. message AnnotationValue {
  93. oneof value_type {
  94. // Annotation value for image classification case.
  95. ImageClassificationAnnotation image_classification_annotation = 1;
  96. // Annotation value for image bounding box, oriented bounding box
  97. // and polygon cases.
  98. ImageBoundingPolyAnnotation image_bounding_poly_annotation = 2;
  99. // Annotation value for image polyline cases.
  100. // Polyline here is different from BoundingPoly. It is formed by
  101. // line segments connected to each other but not closed form(Bounding Poly).
  102. // The line segments can cross each other.
  103. ImagePolylineAnnotation image_polyline_annotation = 8;
  104. // Annotation value for image segmentation.
  105. ImageSegmentationAnnotation image_segmentation_annotation = 9;
  106. // Annotation value for text classification case.
  107. TextClassificationAnnotation text_classification_annotation = 3;
  108. // Annotation value for text entity extraction case.
  109. TextEntityExtractionAnnotation text_entity_extraction_annotation = 10;
  110. // Annotation value for video classification case.
  111. VideoClassificationAnnotation video_classification_annotation = 4;
  112. // Annotation value for video object detection and tracking case.
  113. VideoObjectTrackingAnnotation video_object_tracking_annotation = 5;
  114. // Annotation value for video event case.
  115. VideoEventAnnotation video_event_annotation = 6;
  116. }
  117. }
  118. // Image classification annotation definition.
  119. message ImageClassificationAnnotation {
  120. // Label of image.
  121. AnnotationSpec annotation_spec = 1;
  122. }
  123. // A vertex represents a 2D point in the image.
  124. // NOTE: the vertex coordinates are in the same scale as the original image.
  125. message Vertex {
  126. // X coordinate.
  127. int32 x = 1;
  128. // Y coordinate.
  129. int32 y = 2;
  130. }
  131. // A vertex represents a 2D point in the image.
  132. // NOTE: the normalized vertex coordinates are relative to the original image
  133. // and range from 0 to 1.
  134. message NormalizedVertex {
  135. // X coordinate.
  136. float x = 1;
  137. // Y coordinate.
  138. float y = 2;
  139. }
  140. // A bounding polygon in the image.
  141. message BoundingPoly {
  142. // The bounding polygon vertices.
  143. repeated Vertex vertices = 1;
  144. }
  145. // Normalized bounding polygon.
  146. message NormalizedBoundingPoly {
  147. // The bounding polygon normalized vertices.
  148. repeated NormalizedVertex normalized_vertices = 1;
  149. }
  150. // Image bounding poly annotation. It represents a polygon including
  151. // bounding box in the image.
  152. message ImageBoundingPolyAnnotation {
  153. // The region of the polygon. If it is a bounding box, it is guaranteed to be
  154. // four points.
  155. oneof bounded_area {
  156. BoundingPoly bounding_poly = 2;
  157. NormalizedBoundingPoly normalized_bounding_poly = 3;
  158. }
  159. // Label of object in this bounding polygon.
  160. AnnotationSpec annotation_spec = 1;
  161. }
  162. // A line with multiple line segments.
  163. message Polyline {
  164. // The polyline vertices.
  165. repeated Vertex vertices = 1;
  166. }
  167. // Normalized polyline.
  168. message NormalizedPolyline {
  169. // The normalized polyline vertices.
  170. repeated NormalizedVertex normalized_vertices = 1;
  171. }
  172. // A polyline for the image annotation.
  173. message ImagePolylineAnnotation {
  174. oneof poly {
  175. Polyline polyline = 2;
  176. NormalizedPolyline normalized_polyline = 3;
  177. }
  178. // Label of this polyline.
  179. AnnotationSpec annotation_spec = 1;
  180. }
  181. // Image segmentation annotation.
  182. message ImageSegmentationAnnotation {
  183. // The mapping between rgb color and annotation spec. The key is the rgb
  184. // color represented in format of rgb(0, 0, 0). The value is the
  185. // AnnotationSpec.
  186. map<string, AnnotationSpec> annotation_colors = 1;
  187. // Image format.
  188. string mime_type = 2;
  189. // A byte string of a full image's color map.
  190. bytes image_bytes = 3;
  191. }
  192. // Text classification annotation.
  193. message TextClassificationAnnotation {
  194. // Label of the text.
  195. AnnotationSpec annotation_spec = 1;
  196. }
  197. // Text entity extraction annotation.
  198. message TextEntityExtractionAnnotation {
  199. // Label of the text entities.
  200. AnnotationSpec annotation_spec = 1;
  201. // Position of the entity.
  202. SequentialSegment sequential_segment = 2;
  203. }
  204. // Start and end position in a sequence (e.g. text segment).
  205. message SequentialSegment {
  206. // Start position (inclusive).
  207. int32 start = 1;
  208. // End position (exclusive).
  209. int32 end = 2;
  210. }
  211. // A time period inside of an example that has a time dimension (e.g. video).
  212. message TimeSegment {
  213. // Start of the time segment (inclusive), represented as the duration since
  214. // the example start.
  215. google.protobuf.Duration start_time_offset = 1;
  216. // End of the time segment (exclusive), represented as the duration since the
  217. // example start.
  218. google.protobuf.Duration end_time_offset = 2;
  219. }
  220. // Video classification annotation.
  221. message VideoClassificationAnnotation {
  222. // The time segment of the video to which the annotation applies.
  223. TimeSegment time_segment = 1;
  224. // Label of the segment specified by time_segment.
  225. AnnotationSpec annotation_spec = 2;
  226. }
  227. // Video frame level annotation for object detection and tracking.
  228. message ObjectTrackingFrame {
  229. // The bounding box location of this object track for the frame.
  230. oneof bounded_area {
  231. BoundingPoly bounding_poly = 1;
  232. NormalizedBoundingPoly normalized_bounding_poly = 2;
  233. }
  234. // The time offset of this frame relative to the beginning of the video.
  235. google.protobuf.Duration time_offset = 3;
  236. }
  237. // Video object tracking annotation.
  238. message VideoObjectTrackingAnnotation {
  239. // Label of the object tracked in this annotation.
  240. AnnotationSpec annotation_spec = 1;
  241. // The time segment of the video to which object tracking applies.
  242. TimeSegment time_segment = 2;
  243. // The list of frames where this object track appears.
  244. repeated ObjectTrackingFrame object_tracking_frames = 3;
  245. }
  246. // Video event annotation.
  247. message VideoEventAnnotation {
  248. // Label of the event in this annotation.
  249. AnnotationSpec annotation_spec = 1;
  250. // The time segment of the video to which the annotation applies.
  251. TimeSegment time_segment = 2;
  252. }
  253. // Additional information associated with the annotation.
  254. message AnnotationMetadata {
  255. // Metadata related to human labeling.
  256. OperatorMetadata operator_metadata = 2;
  257. }
  258. // General information useful for labels coming from contributors.
  259. message OperatorMetadata {
  260. // Confidence score corresponding to a label. For examle, if 3 contributors
  261. // have answered the question and 2 of them agree on the final label, the
  262. // confidence score will be 0.67 (2/3).
  263. float score = 1;
  264. // The total number of contributors that answer this question.
  265. int32 total_votes = 2;
  266. // The total number of contributors that choose this label.
  267. int32 label_votes = 3;
  268. // Comments from contributors.
  269. repeated string comments = 4;
  270. }