diff --git a/p__Radar__index.async.js b/p__Radar__index.async.js index feca98b..92e01db 100644 --- a/p__Radar__index.async.js +++ b/p__Radar__index.async.js @@ -1 +1 @@ -"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[842],{79512:function(Qe,F,p){p.r(F),p.d(F,{default:function(){return Xe}});var K=p(56690),y=p.n(K),X=p(89728),R=p.n(X),Z=p(61655),S=p.n(Z),Y=p(26389),C=p.n(Y),k=p(62435),E=p(80840),Q=p(42122),o=p.n(Q),$=p(70215),M=p.n($),q=p(66115),w=p.n(q),ee=p(38416),O=p.n(ee),T=p(84289),ae=p(43033),e=p(86074),te=["dataSource","isMobile"],ne=function(v){S()(r,v);var m=C()(r);function r(b){var i;return y()(this,r),i=m.call(this,b),O()(w()(i),"phoneClick",function(){var u=!i.state.phoneOpen;i.setState({phoneOpen:u})}),i.state={phoneOpen:!1},i}return R()(r,[{key:"render",value:function(){var i=this,u=this.props,h=u.dataSource,g=u.isMobile,f=M()(u,te),l=this.state.phoneOpen,n=h.LinkMenu,a=n.children,d=Object.keys(a).map(function(c,s){var _=a[c],I=ae.rU,j={};return _.to&&_.to.match(/\//g)&&(j.href=_.to,j.target="_blank",I="a",delete _.to),k.createElement(I,o()(o()(o()({},_),j),{},{key:s.toString()}),a[c].children)}),t=l===void 0?300:null;return(0,e.jsx)(T.ZP,o()(o()(o()({component:"header",animation:{opacity:0,type:"from"}},h.wrapper),f),{},{children:(0,e.jsxs)("div",o()(o()({},h.page),{},{className:"".concat(h.page.className).concat(l?" open":""),children:[(0,e.jsx)(T.ZP,o()(o()({animation:{x:-30,type:"from",ease:"easeOutQuad"}},h.logo),{},{children:(0,e.jsx)("img",{width:"100%",src:h.logo.children,alt:"img"})})),g&&(0,e.jsxs)("div",o()(o()({},h.mobileMenu),{},{onClick:function(){i.phoneClick()},children:[(0,e.jsx)("em",{}),(0,e.jsx)("em",{}),(0,e.jsx)("em",{})]})),(0,e.jsx)(T.ZP,o()(o()({},n),{},{animation:g?{height:0,duration:300,onComplete:function(s){i.state.phoneOpen&&(s.target.style.height="auto")},ease:"easeInOutQuad"}:null,moment:t,reverse:!!l,children:d}))]}))}))}}]),r}(k.Component),re=ne,oe=p(13012),D=p.n(oe),W=p(72806),V=p(1289),ie=p(72575),$e=p(83154),se=["name","texty"],ce=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=u.textWrapper.children.map(function(g){var f=g.name,l=g.texty,n=M()(g,se);return f.match("button")?(0,e.jsx)(W.Z,o()(o()({type:"primary"},n),{},{children:g.children}),f):(0,e.jsx)("div",o()(o()({},n),{},{children:l?(0,e.jsx)(ie.Z,{type:"mask-bottom",children:g.children}):g.children}),f)});return(0,e.jsx)("div",o()(o()(o()({},i),u.wrapper),{},{children:(0,e.jsx)(V.Z,o()(o()({type:["bottom","top"],delay:200},u.textWrapper),{},{children:h}),"QueueAnim")}))}}]),r}(k.PureComponent),de=ce,le=p(3600),L=p.n(le),x=p(91587),ue=p(18698),pe=p.n(ue),G=/^http(s)?:\/\/([\w-]+\.)+[\w-]+(\/[\w-./?%&=]*)?/,B=function(m,r){var b=m.name.indexOf("title")===0?"h1":"div";b=m.href?"a":b;var i=typeof m.children=="string"&&m.children.match(G)?k.createElement("img",{src:m.children,alt:"img"}):m.children;return m.name.indexOf("button")===0&&pe()(m.children)==="object"&&(i=k.createElement(W.Z,o()({},m.children))),k.createElement(b,o()({key:r.toString()},m),i)},me=["childWrapper"],he=["columns","dataSource"],ge=["dataSource","isMobile"],fe=["columns","dataSource"],qe=function(v){S()(r,v);var m=C()(r);function r(){var b;y()(this,r);for(var i=arguments.length,u=new Array(i),h=0;h=0}),d=f.children.filter(function(t){return t.key.indexOf("name")===-1});return d.map(function(t,c){var 
s=[].concat(a[0],t).filter(function(j){return j});s.length>1&&(s[0].colSpan=0,s[1].colSpan=2);var _=l.children.map(function(j){var Ze=j.children.filter(function(P){return P.name.indexOf("name")===-1}),Ye=j.children.filter(function(P){return P.name.indexOf("name")>=0});return o()(o()({},j),{},{children:[].concat(Ye[0],Ze[c]).filter(function(P){return P})})}),I=o()(o()({},n),{},{columns:b.getColumns(s),dataSource:b.getDataSource(_,s)});return(0,e.jsx)(x.Z,o()(o()({},I),{},{pagination:!1,bordered:!0}),c.toString())})}),b}return R()(r,[{key:"render",value:function(){var i=this.props,u=i.dataSource,h=i.isMobile,g=M()(i,ge),f=u.Table,l=u.wrapper,n=u.page,a=u.titleWrapper,d=f.columns,t=f.dataSource,c=M()(f,fe),s=o()(o()({},c),{},{columns:this.getColumns(d.children),dataSource:this.getDataSource(t.children,d.children)}),_=h?this.getMobileChild(f):(0,e.jsx)(x.Z,o()(o()({},s),{},{pagination:!1,bordered:!0}),"table");return(0,e.jsx)("div",o()(o()(o()({},g),l),{},{children:(0,e.jsx)("div",o()({},n))}))}}]),r}(k.PureComponent),ea=null,aa=p(61254),_e=p.p+"static/cover.df8b2232.png",ve=p(46889),be=p(45098),ke=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=[{name:"Color, Texture, Shape",star:1},{name:"Range Measurement",star:5},{name:"Velocity Measurement",star:5},{name:"Lighting Robustness",star:5},{name:"Weather Robustness",star:5},{name:"Classification Ability",star:2},{name:"3D Perception",star:1},{name:"Cost Advantage",star:4}],g={data:h.map(function(t){return o()(o()({},t),{},{star:t.star})}),xField:"name",yField:"star",appendPadding:[0,20,0,20],color:"#B2934A",legend:!0,meta:{star:{alias:"Radar Ability",min:0,nice:!0,formatter:function(c){return c}}},xAxis:{tickLine:null},yAxis:{label:!1,grid:{alternateColor:"rgba(0, 0, 0, 0.04)"}},point:{size:2},area:{}},f=[{name:"Color, Texture, Shape",star:5},{name:"Range Measurement",star:2},{name:"Velocity Measurement",star:2},{name:"Lighting Robustness",star:3},{name:"Weather Robustness",star:3},{name:"Classification Ability",star:5},{name:"3D Perception",star:3},{name:"Cost Advantage",star:5}],l={data:f.map(function(t){return o()(o()({},t),{},{star:t.star})}),xField:"name",yField:"star",appendPadding:[0,20,0,20],color:"#B66A6A",meta:{star:{alias:"Camera Ability",min:0,nice:!0,formatter:function(c){return c}}},xAxis:{tickLine:null},yAxis:{label:!1,grid:{alternateColor:"rgba(0, 0, 0, 0.04)"}},point:{size:2},area:{}},n=[{name:"Color, Texture, Shape",star:5},{name:"Range Measurement",star:5},{name:"Velocity Measurement",star:5},{name:"Lighting Robustness",star:5},{name:"Weather Robustness",star:5},{name:"Classification Ability",star:5},{name:"3D Perception",star:3},{name:"Cost Advantage",star:4}],a={data:n.map(function(t){return o()(o()({},t),{},{star:t.star})}),xField:"name",yField:"star",color:"#589D9D",meta:{star:{alias:"Fusion Ability",min:0,nice:!0,formatter:function(c){return c}}},xAxis:{tickLine:null},yAxis:{label:!1,grid:{alternateColor:"rgba(0, 0, 0, 0.04)"}},point:{size:2},area:{}},d=function(c,s,_,I){console.log("params",c,s,_,I)};return(0,e.jsx)("div",{className:"home-page-wrapper content6-wrapper",children:(0,e.jsxs)("div",{className:"ant-row home-page content6",id:"WaterScenes",children:[(0,e.jsx)("div",{className:"ant-col content6-text ant-col-xs-2 ant-col-md-2"}),(0,e.jsx)("div",{className:"ant-col content6-text ant-col-xs-20 
ant-col-md-20",children:(0,e.jsx)("div",{className:"title-wrapper",children:(0,e.jsx)("div",{className:"chart",children:(0,e.jsx)(ve.Z,{src:_e})})})})]})})}}]),r}(k.PureComponent),ye=ke,A=p(20550),Re=p(27049),Se=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=[],g=[40,.5,13,12,3,11,44,100,400,393,49,10,4,8,8,7,40,35,26,54],f=[60,8,30,28,10,26,65,70,90,88,60,24,12,21,21,18,60,55,40,66];(0,be.S6)(g,function(d,t){h.push({type:"text",position:[t,f[t]],content:"".concat(d,"k"),style:{textAlign:"center",fontSize:14,fill:"rgba(0,0,0,0.85)"},offsetY:-10})});var l=[{title:"Id",dataIndex:"key",width:"10px"},{title:"Name",dataIndex:"name",width:"10%",render:function(t,c){return(0,e.jsxs)("div",{children:[(0,e.jsx)("a",{target:"_blank",href:t[1],children:t[0]})," [",(0,e.jsx)("a",{href:"#references",children:t[2]}),"]"]})}},{title:"Year",dataIndex:"year",sorter:function(t,c){return t.year-c.year}},{title:"Radar Data Representation",dataIndex:"radar_data_representation",filters:[{text:"Point Cloud",value:"Point Cloud"},{text:"Radar Tensor",value:"Radar Tensor"}],onFilter:function(t,c){return c.radar_data_representation.includes(t)},filterSearch:!0,render:function(t,c){return(0,e.jsx)("span",{children:t.map(function(s){var _="";switch(s){case"Point Cloud":_="#108ee9";break;case"ADC Signal":_="#f50";break;case"Radar Tensor":_="#2db7f5";case"Grid Map":_="#87d068";break;case"Micro-Doppler Signature":_="#2db7f5";default:_="#108ee9"}return(0,e.jsx)(A.Z,{color:_,children:s},s)})})}},{title:"Task",dataIndex:"task",filters:[{text:"Object Detection",value:"Object Detection"},{text:"Semantic Segmentation",value:"Semantic Segmentation"}],onFilter:function(t,c){return c.task.includes(t)},filterSearch:!0,render:function(t,c){return(0,e.jsx)("span",{children:t.map(function(s){var _="";switch(s){case"Object Detection":_="#1890ff";break;case"Semantic Segmentation":_="#fa541c";break;case"Object Tracking":_="#fa8c16";break;case"Localization":_="#13c2c2";break;case"Planning":_="#52c41a";break;case"Prediction":_="#f5222d";break;case"":_="#722ed1";break;case"":_="#eb2f96";break;case"":_="#722ed1";break;default:_="blue-inverse"}return(0,e.jsx)(A.Z,{color:_,children:s},s)})})}},{title:"Sensors",dataIndex:"sensors"},{title:"Category Number",dataIndex:"category_number"},{title:"Categories",dataIndex:"categories"},x.Z.EXPAND_COLUMN,{title:"Record Area",dataIndex:"record_area"},{title:"Record Time",dataIndex:"record_time"},{title:"Affiliation",dataIndex:"affiliation"}],n=[{key:"1",name:["nuScenes","https://www.nuscenes.org/nuscenes","1"],year:2019,task:["Object Detection","Object Tracking"],sensors:"Radar (Continental ARS408), Camera,LiDAR",radar_data_representation:["Point Cloud","Grid Map"],category_number:23,categories:"different vehicles, types of pedestrians, mobility devices and other objects",scenarios:"Roads (intersection, crosswalk, roundabout, pedestrian crossing)",record_area:"Boston, Singapore",record_time:"September 2018",affiliation:"nuTonomy",paper_link:"https://openaccess.thecvf.com/content_CVPR_2020/papers/Caesar_nuScenes_A_Multimodal_Dataset_for_Autonomous_Driving_CVPR_2020_paper.pdf"},{key:"2",name:["Astyx","http://www.astyx.net","2"],year:2019,task:["Object Detection"],sensors:"Radar (Astyx 6455 HiRes), Camera, Lidar",radar_data_representation:["Point Cloud"],category_number:7,categories:"Bus, Car, Cyclist, 
Motorcyclist, Person, Trailer, Truck",scenarios:"Roads (highway, urban, rural, parking, roundabout)",record_area:"South of Germany",record_time:"-",affiliation:"Technical University of Munich",paper_link:"https://ieeexplore.ieee.org/abstract/document/8904734"},{key:"3",name:["SeeingThroughFog","https://www.uni-ulm.de/en/in/driveu/projects/dense-datasets/","3"],year:2020,task:["Object Detection"],sensors:"Radar (77GHz), Stereo/Gated/FIR Camera, LiDAR, Environmental Sensors",radar_data_representation:["Point Cloud"],category_number:4,categories:"Passenger Car, Large Vehicle, Pedestrian, Ridable Vehicle",scenarios:"Adverse road conditions (clear, rainy, snowy, foggy, nighttime, urban, highway, rural, traffic)",record_area:"Germany, Sweden, Denmark, and Finland",record_time:"February and December 2019",affiliation:"Mercedes-Benz AG",paper_link:"https://openaccess.thecvf.com/content_CVPR_2020/html/Bijelic_Seeing_Through_Fog_Without_Seeing_Fog_Deep_Multimodal_Sensor_Fusion_CVPR_2020_paper.html"},{key:"4",name:["CARRADA","https://arthurouaknine.github.io/codeanddata/carrada","4"],year:2020,task:["Object Detection","Semantic Segmentation","Object Tracking"],sensors:"Radar (TI AWR1843), RGB-D Camera, LiDAR",radar_data_representation:["Radar Tensor"],category_number:9,categories:"car, pedestrian, cyclist, and motorbike",scenarios:"Roads (urban, highway, intersection scenarios)",record_area:"Canada",record_time:"-",affiliation:"-",paper_link:"https://ieeexplore.ieee.org/document/9413181"},{key:"5",name:["Zendar","http://zendar.io/dataset","5"],year:2020,task:["Object Detection","Mapping","Localization"],sensors:"Radar (synthetic aperture), Camera, LiDAR",radar_data_representation:["Radar Tensor","Point Cloud"],category_number:3,categories:"vehicles, pedestrians, and cyclists",scenarios:"Roads (diverse urban driving environments)",record_area:"-",record_time:"-",affiliation:"Technical University of Munich",paper_link:"https://openaccess.thecvf.com/content_CVPRW_2020/papers/w6/Mostajabi_High-Resolution_Radar_Dataset_for_Semi-Supervised_Learning_of_Dynamic_Objects_CVPRW_2020_paper.pdf"},{key:"6",name:["RADIATE","http://pro.hw.ac.uk/radiate/","6"],year:2020,task:["Object Detection"],sensors:"Radar (Navtech CTS350-X), Camera",radar_data_representation:["Radar Tensor"],category_number:8,categories:"car, van, bus, truck, motorbike, bicycle, pedestrian and a group of pedestrians",scenarios:"Roads (wet, snowy, foggy, rainy, nighttime, urban, highway)",record_area:"Edinburgh",record_time:"Between February 2019 and February 2020",affiliation:"Heriot-Watt University",paper_link:"https://arxiv.org/pdf/2010.09076.pdf"},{key:"7",name:["AIODrive","http://www.aiodrive.org/","7"],year:2020,task:["Object Detection","Object Tracking","Semantic Segmentation","Depth Estimation"],sensors:"RGB, Stereo, Depth, LiDAR, SPAD-LiDAR, Radar, IMU, GPS",radar_data_representation:["Point Cloud"],category_number:11,categories:"Vehicle, Pedestrian, Vegetation, Building, Road, Sidewalk, Wall, Traffic Sign, Pole and Fence",scenarios:"Roads (highway, residential street, parking)",record_area:"one of eight cities from Carla assets",record_time:"-",affiliation:"Carnegie Mellon University",paper_link:"https://www.xinshuoweng.com/papers/AIODrive/arXiv.pdf"},{key:"8",name:["CRUW","https://www.cruwdataset.org/","8"],year:2021,task:["Object Detection"],sensors:"Radar (TI AWR1843, DCA1000), Cameras",radar_data_representation:["Radar Tensor"],category_number:3,categories:"Pedestrian, 
Cyclist, Car",scenarios:"Roads (parking, campus, city, highway)",record_area:"-",record_time:"-",affiliation:"University of Washington",paper_link:"https://arxiv.org/pdf/2107.14599.pdf"},{key:"9",name:["RaDICaL","https://publish.illinois.edu/radicaldata/","9"],year:2021,task:["Object Detection"],sensors:"Radar (TI IWR1443), RGB-D Camera",radar_data_representation:["ADC Signal"],category_number:2,categories:"Pedestrian, Car",scenarios:" Indoor (people, static clutter), Roads (urban, rural, highway, various traffic scenarios)",record_area:"-",record_time:"-",affiliation:"University of Illinois at Urbana-Champaign",paper_link:"https://ieeexplore.ieee.org/document/9361086"},{key:"10",name:["RadarScenes","https://radar-scenes.com/","10"],year:2021,task:["Object Detection","Semantic Segmentation"],sensors:"Radar (77GHz), Documentary Camera",radar_data_representation:["Point Cloud"],category_number:11,categories:"Car, Large Vehicle, Truck, Bus, Train, Bicycle, Motorized Two-wheeler, Pedestrian, Pedestrian Group, Animal, and Other",scenarios:" Roads (urban, suburban, rural, highway, tunnel, intersection, roundabout, parking)",record_area:"Ulm, Germany",record_time:"Between 2016 and 2018",affiliation:"Mercedes-Benz AG, Stuttgart, Germany",paper_link:"https://arxiv.org/pdf/2104.02493v1.pdf"},{key:"11",name:["RADDet","https://github.com/ZhangAoCanada/RADDet","11"],year:2021,task:["Object Detection"],sensors:"Radar (TI AWR1843), Stereo Cameras",radar_data_representation:["Radar Tensor"],category_number:6,categories:"Person, Bicycle, Car, Motorcycle, Bus, Truck",scenarios:" Roads (urban, rural, highway, intersections, weather conditions)",record_area:"-",record_time:"September to October 2020",affiliation:"University of Ottawa",paper_link:"https://ieeexplore.ieee.org/abstract/document/9469418"},{key:"12",name:["RADIal","https://github.com/valeoai/RADIal","12"],year:2021,task:["Object Detection","Semantic Segmentation"],sensors:"Radar (high-definition), Cameras, LiDAR",radar_data_representation:["ADC Signal","Radar Tensor","Point Cloud"],category_number:1,categories:"Vehicle",scenarios:"Roads (urban, highway, rural)",record_area:"-",record_time:"-",affiliation:"Valeo.ai, Paris, France",paper_link:"https://arxiv.org/abs/2112.10646"},{key:"13",name:["VoD","https://tudelft-iv.github.io/view-of-delft-dataset/","13"],year:2022,task:["Object Detection","Object Tracking"],sensors:"Radar (ZF FRGen 21), Stereo Camera, LiDAR",radar_data_representation:["Point Cloud"],category_number:13,categories:"Car, Pedestrian, Cyclist, Rider, Unused Bicycle, Bicycle Rack, Human Depiction, Moped or Scooter, Motor, Ride Other, Vehicle Other, Truck, Ride Uncertain",scenarios:"Roads (highway, rural, urban)",record_area:"City of Delft (The Netherlands)",record_time:"-",affiliation:"TU Delft, The Netherlands",paper_link:"https://pure.tudelft.nl/ws/portalfiles/portal/115464174/Multi_Class_Road_User_Detection_With_31D_Radar_in_the_View_of_Delft_Dataset.pdf"},{key:"14",name:["Boreas","https://www.boreas.utias.utoronto.ca/","14"],year:2022,task:["Object Detection","Localization","Odometry"],sensors:"Radar (Navtech CIR304-H), Camera, LiDAR",radar_data_representation:["Radar Tensor"],category_number:4,categories:"Car, Pedestrian, Cyclist, Misc",scenarios:"Roads (highway, rural, urban)",record_area:"University of Toronto Institute for Aerospace Studies (UTIAS)",record_time:"November, 2020 and \uFB01nishing in November, 2021",affiliation:"University of 
Toronto",paper_link:"https://arxiv.org/pdf/2203.10168.pdf"},{key:"15",name:["TJ4DRadSet","https://github.com/TJRadarLab/TJ4DRadSet","15"],year:2022,task:["Object Detection","Object Tracking"],sensors:"Radar (Oculii Eagle), Camera, LiDAR",radar_data_representation:["Point Cloud"],category_number:8,categories:"Car, Pedestrian, Cyclist, Bus, Motorcyclist, Truck, Engineering Vehicle, Tricyclist",scenarios:" Roads (intersections, one-way streets)",record_area:"Suzhou, China",record_time:"Fourth quarter of 2021",affiliation:"Tongji University",paper_link:"https://arxiv.org/vc/arxiv/papers/2204/2204.13483v2.pdf"},{key:"16",name:["K-Radar","https://github.com/kaist-avelab/k-radar","16"],year:2022,task:["Object Detection","Object Tracking","SLAM"],sensors:"Radar (RETINA-4ST), Stereo Cameras, LiDAR",radar_data_representation:["Radar Tensor"],category_number:5,categories:"Pedestrian, Motobike, Bicycle, Sedan, Bus or Truck",scenarios:"Roads (highway, intersection, urban)",record_area:"Daejeon of the Republic of Korea",record_time:"-",affiliation:"KAIST",paper_link:"https://www.researchgate.net/publication/361359662_K-Radar_4D_Radar_Object_Detection_Dataset_and_Benchmark_for_Autonomous_Driving_in_Various_Weather_Conditions"},{key:"17",name:["aiMotive","https://github.com/aimotive/aimotive_dataset","17"],year:2022,task:["Object Detection"],sensors:"Radar (77GHz), Camera, LiDAR",radar_data_representation:["Point cloud"],category_number:14,categories:"Pedestrian, Car, Bus, Truck, Van, Motorcycle, Pickup, Rider, Bicycle, Trailer, Train, Shopping Cart, Other Object",scenarios:" Roads (highway, urban, rural)",record_area:"California, US; Austria; and Hungary",record_time:"-",affiliation:"aimotive",paper_link:"https://openreview.net/pdf?id=yl9aThYT9W"},{key:"18",name:["WaterScenes","https://waterscenes.github.io","18"],year:2023,task:["Object Detection","Segmentation"],sensors:"Radar (Oculii Eagle), Camera, GPS, IMU",radar_data_representation:["point cloud"],category_number:7,categories:"Pier, Buoy, Sailor, Ship, Boat, Vessel, Kayak",scenarios:"Waterways (river, lake, canal, moat)",record_area:"Suzhou, China",record_time:"2022/08-2022/12",affiliation:"XJTLU",paper_link:"https://arxiv.org/pdf/2307.06505v2.pdf"},{key:"19",name:["MulRan","https://sites.google.com/view/mulran-pr","19"],year:2020,task:["Place Recognition"],sensors:"Radar (Navtech CIR204-H), Cameras, LiDAR",radar_data_representation:["Radar Tensor"],category_number:7,categories:"buildings, road, tree, sign, car, pedestrain, bike",scenarios:"Roads (city, highway, intersection, crosswalks, parks, recreational areas, tunnels, bridges)",record_area:"-",record_time:"2018-2019",affiliation:"Politecnico di Milano",paper_link:"https://gisbi-kim.github.io/publications/gkim-2020-icra.pdf"},{key:"20",name:["Oxford Radar RobotCar","http://ori.ox.ac.uk/datasets/radar-robotcar-dataset","20"],year:2020,task:["Object Detection","Odometer"],sensors:"Radar (Navtech CTS350-X), camera, LiDAR, GPS, INS",radar_data_representation:["Radar Tensor","Grid Map"],category_number:7,categories:"Vehicle,Pedestrian,Bicycle,Sign,Road,Lane,Road Marking",scenarios:"Roads (urban, highway, rural, industrial area, residential area, roundabout, intersection)",record_area:"-",record_time:"2019-2020",affiliation:"Department of Engineering Science, University of Oxford, UK",paper_link:"https://arxiv.org/pdf/1909.01300.pdf"},{key:"21",name:["SCORP","www.sensorcortek.ai/publications/","21"],year:2020,task:["Semantic Segmentation"],sensors:"Radar (76 GHz), 
Camera",radar_data_representation:["Radar Tensor"],category_number:0,categories:"-",scenarios:"Roads (parking lot)",record_area:"-",record_time:"-",affiliation:"University of Ottawa, Ottawa, Canada",paper_link:"https://ieeexplore.ieee.org/abstract/document/9299052"},{key:"22",name:["ColoRadar","https://arpg.github.io/coloradar/","22"],year:2022,task:["Localization"],sensors:"Radar (AWR1843), LiDAR, IMU",radar_data_representation:["Radar Tensor","Point Cloud"],category_number:0,categories:"-",scenarios:"Indoor, outdoor environments",record_area:"-",record_time:"-",affiliation:"Department of Computer Science, University of Colorado Boulder, USA",paper_link:"https://journals.sagepub.com/doi/10.1177/02783649211068535"},{key:"23",name:["Pixset","dataset.leddartech.com","23"],year:2021,task:["Object Detection","Object Tracking"],sensors:"Radar (TI AWR1843), Cameras, LiDARs",radar_data_representation:["Point Cloud"],category_number:0,categories:"-",scenarios:"Roads (Car, Pedestrian, Cyclist)",record_area:"-",record_time:"2019",affiliation:"-",paper_link:"https://arxiv.org/pdf/2102.12010v1.pdf"},{key:"24",name:["NTU4DRadLM","https://github.com/ junzhang2016/NTU4DRadLM","24"],year:2023,task:["SLAM"],sensors:"a 3D LiDAR, a visual camera, a 4D Radar, a thermal camera, an IMU and a RTK GPS",radar_data_representation:["Point Cloud"],category_number:0,categories:"-",scenarios:"Roads (carpark, garden, campus)",record_area:"-",record_time:"-",affiliation:"Nanyang Technological University",paper_link:"https://arxiv.org/pdf/2309.00962.pdf"},{key:"25",name:["Dual-Radar","ttps://github.com/adept- thu/Dual-Radar","25"],year:2023,task:["Object Detection","Object Tracking"],sensors:"Radar (ARS548 RDI, Arbe Phoenix), Camera, LiDAR",radar_data_representation:["Point Cloud"],category_number:6,categories:"Car, Pedestrian, Cyclist, Bus, Truck, other",scenarios:"Roads (carpark, garden, campus)",record_area:"-",record_time:"-",affiliation:"Tsinghua University, Beijing",paper_link:"http://export.arxiv.org/pdf/2310.07602"},{key:"26",name:["Dop-NET","https://github.com/UCLRadarGroup/DopNet","26"],year:2020,task:["Classification"],sensors:"Radar (Ancortek 24GHz)",radar_data_representation:["Micro-Doppler Signature"],category_number:0,categories:"-",scenarios:"Gestures (wave, pinch, click, swipe)",record_area:"-",record_time:"-",affiliation:"University College London",paper_link:"https://ietresearch.onlinelibrary.wiley.com/doi/epdf/10.1049/el.2019.4153"},{key:"27",name:["CI4R","-","27"],year:2020,task:["Classification"],sensors:"Radar (77GHz, 24GHz, Xethru)",radar_data_representation:["Micro-Doppler Signature"],category_number:0,categories:"-",scenarios:"Activities (walking, picking, sitting, crawling, kneeling, limping)",record_area:"-",record_time:"-",affiliation:"-",paper_link:"https://doi.org/10.1117/12.2559155"},{key:"28",name:["Open Radar Datasets","-","28"],year:2021,task:["Classification"],sensors:"Radar (TI AWR2243), Camera, GPS, IMU",radar_data_representation:["Micro-Doppler Signature"],category_number:0,categories:"-",scenarios:"Roads (urban, highway, rural)",record_area:"-",record_time:"-",affiliation:"Norwegian Defence Research Establishment, Kjeller, Norway",paper_link:"https://ieeexplore.ieee.org/abstract/document/9455239"},{key:"29",name:["MCD-Gesture","https://github.com/yadongcs/cross_domain_gesture_dataset","29"],year:2022,task:["Classification"],sensors:"Radar (TI AWR1843)",radar_data_representation:["Micro-Doppler Signature"],category_number:0,categories:"-",scenarios:" Gestures (push, pull, slide left, 
slide right, clockwise turning, counterclockwise turning)",record_area:"-",record_time:"-",affiliation:"School of Cyber Science and Technology, University of Science and Technology of China",paper_link:"https://ieeexplore.ieee.org/abstract/document/9894724"}],a=function(t,c,s,_){console.log("params",t,c,s,_)};return(0,e.jsx)("div",o()(o()(o()({},i),u.wrapper),{},{id:"datasets",children:(0,e.jsxs)("div",{className:"title-wrapper",children:[(0,e.jsx)(Re.Z,{orientation:"center",children:(0,e.jsx)("h1",{name:"title",className:"title-h1",children:"Radar Perception Datasets"})}),(0,e.jsx)(x.Z,{bordered:!0,scroll:{x:"200px"},pagination:{pageSize:10,hideOnSinglePage:!0},columns:l,dataSource:n,onChange:a,expandable:{columnTitle:"Size / Scenarios",expandedRowRender:function(t){return(0,e.jsxs)("p",{style:{margin:0},children:["Size: ",t.size,(0,e.jsx)("br",{}),"Scenarios: ",t.scenarios]})},rowExpandable:function(t){return t.name!=="Not Expandable"}}})]})}))}}]),r}(k.PureComponent),Ce=Se,De=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=[{title:"Id",dataIndex:"key",width:"10px"},{title:"Name",dataIndex:"name"},{title:"Short Name",dataIndex:"short_name"},{title:"Year",dataIndex:"year",sorter:function(n,a){return n.year-a.year}},{title:"Task",dataIndex:"task",filters:[{text:"Object Detection",value:"Object Detection"},{text:"Semantic Segmentation",value:"Semantic Segmentation"}],onFilter:function(n,a){return a.task.includes(n)},filterSearch:!0,width:"10%",render:function(n,a){var d=n.toString().split("|");console.log(d);var t=[];return d.map(function(c){c=c.trim();var s="";switch(c){case"Object Detection":s="#1890ff";break;case"Semantic Segmentation":s="#fa541c";break;case"Object Tracking":s="#fa8c16";break;case"Localization":s="#13c2c2";break;case"Classification/Motion Recognition":s="#52c41a";break;case"Object Classification":s="#eb2f96";break;default:s="blue-inverse"}t.push((0,e.jsx)(A.Z,{color:s,children:c},c))}),t}},{title:"Dataset",dataIndex:"dataset",filters:[{text:"nuScenes",value:"nuScenes"}],onFilter:function(n,a){return a.dataset.includes(n)},filterSearch:!0,render:function(n,a){var d=n.toString().split("|"),t=[];return d.map(function(c){t.push((0,e.jsxs)("div",{children:[(0,e.jsx)("span",{children:c}),(0,e.jsx)("br",{})]}))}),t}},{title:"Conference/Journal",dataIndex:"conference_journal"},{title:"Link",dataIndex:"source_code",render:function(n,a){return console.log(a),a.source_code!=""&&a.source_code!="-"?(0,e.jsxs)("div",{children:[(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"}),"\xA0\xA0",(0,e.jsx)("a",{target:"_blank",href:a.source_code,children:"Code"})]}):(0,e.jsx)("div",{children:(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"})})}}],g=[{key:"1",name:"Radar Image Reconstruction from Raw ADC Data using Parametric Variational Autoencoder with Domain Adaptation",short_name:"-",year:2020,task:["Classification/Motion Recognition"],dataset:[],conference_journal:"ICPR",paper_link:"https://ieeexplore.ieee.org/abstract/document/9412858",source_code:""},{key:"2",name:"Improved Target Detection and Feature Extraction using a Complex-Valued Adaptive Sine Filter on Radar Time Domain Data",short_name:"-",year:2021,task:["Classification/Motion 
Recognition"],dataset:[],conference_journal:"EUSIPCO",paper_link:"https://ieeexplore.ieee.org/abstract/document/9616250",source_code:""},{key:"3",name:"Data-Driven Radar Processing Using a Parametric Convolutional Neural Network for Human Activity Classification",short_name:"-",year:2021,task:["Classification/Motion Recognition"],dataset:[],conference_journal:"IEEE Sensors",paper_link:"https://ieeexplore.ieee.org/abstract/document/9464267",source_code:""},{key:"4",name:"Spiking Neural Network-Based Radar Gesture Recognition System Using Raw ADC Data",short_name:"-",year:2021,task:["Classification/Motion Recognition"],dataset:[],conference_journal:"IEEE Sensors Letters",paper_link:"https://ieeexplore.ieee.org/abstract/document/9772332",source_code:""},{key:"5",name:"Detection of Human Breathing in Non-Line-of-Sight Region by Using mmWave FMCW Radar",short_name:"-",year:2022,task:["Classification/Motion Recognition"],dataset:[],conference_journal:"TIM",paper_link:"https://ieeexplore.ieee.org/abstract/document/9897091",source_code:""},{key:"6",name:"CubeLearn: End-to-End Learning for Human Motion Recognition From Raw mmWave Radar Signals",short_name:"CubeLearn",year:2023,task:["Classification/Motion Recognition"],dataset:[],conference_journal:"IEEE IOT",paper_link:"https://ieeexplore.ieee.org/abstract/document/10018429",source_code:""},{key:"7",name:"ADCNet: End-to-end perception with raw radar ADC data",short_name:"ADCNet",year:2023,task:["Object Dection"],dataset:["RADIal"],conference_journal:"arXiv",paper_link:"https://arxiv.org/abs/2303.11420",source_code:""},{key:"8",name:"T-FFTRadNet: Object Detection with Swin Vision Transformers from Raw ADC Radar Signals",short_name:"T-FFTRadNet",year:2023,task:["Object Dection"],dataset:["RADIal"],conference_journal:"arXiv",paper_link:"https://arxiv.org/abs/2303.16940",source_code:""},{key:"9",name:"Echoes Beyond Points: Unleashing the Power of Raw Radar Data in Multi-modality Fusion",short_name:"Echoes Beyond Points",year:2023,task:["Object Dection"],dataset:["RADIal"],conference_journal:"NeurIPS",paper_link:"https://arxiv.org/abs/2307.16532",source_code:""},{key:"10",name:"Azimuth Super-Resolution for FMCW Radar in Autonomous Driving",short_name:"-",year:2023,task:["Object Dection"],dataset:["RADIal"],conference_journal:"CVPR",paper_link:"https://openaccess.thecvf.com/content/CVPR2023/html/Li_Azimuth_Super-Resolution_for_FMCW_Radar_in_Autonomous_Driving_CVPR_2023_paper.html",source_code:""},{key:"11",name:"RF-based child occupation detection in the vehicle interior",short_name:"-",year:2016,task:["Vital Sign"],dataset:[],conference_journal:"IRS",paper_link:"https://ieeexplore.ieee.org/document/7497352",source_code:""},{key:"12",name:"A Theoretical Investigation of the Detection of Vital Signs in Presence of Car Vibrations and RADAR-Based Passenger Classification",short_name:"-",year:2019,task:["Vital Sign"],dataset:[],conference_journal:"TVT",paper_link:"https://ieeexplore.ieee.org/abstract/document/8638548",source_code:""},{key:"13",name:"Non-Contact Vital Signs Monitoring for Multiple Subjects Using a Millimeter Wave FMCW Automotive Radar",short_name:"-",year:2020,task:["Vital Sign"],dataset:[],conference_journal:"IMS",paper_link:"https://ieeexplore.ieee.org/abstract/document/9223838",source_code:""},{key:"14",name:"Sparsity-Based Multi-Person Non-Contact Vital Signs Monitoring via FMCW Radar",short_name:"-",year:2023,task:["Vital 
Sign"],dataset:[],conference_journal:"JBHI",paper_link:"https://ieeexplore.ieee.org/abstract/document/10065434",source_code:""},{key:"15",name:"Radar-Based Monitoring of Vital Signs: A Tutorial Overview",short_name:"-",year:2023,task:["Vital Sign"],dataset:[],conference_journal:"JPROC",paper_link:"https://ieeexplore.ieee.org/abstract/document/10049295",source_code:""}],f=function(n,a,d,t){console.log("params",n,a,d,t)};return(0,e.jsxs)("div",o()(o()(o()({},i),u.wrapper),{},{id:"methods",children:[(0,e.jsx)("div",{className:"title-wrapper",children:(0,e.jsx)("h2",{name:"title",className:"title-h1",children:"ADC Signal Methods"})}),(0,e.jsx)(x.Z,{bordered:!0,scroll:{x:"200px"},columns:h,dataSource:g,onChange:f})]}))}}]),r}(k.PureComponent),xe=De,je=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=[{title:"Id",dataIndex:"key",width:"10px"},{title:"Name",dataIndex:"name"},{title:"Short Name",dataIndex:"short_name"},{title:"Year",dataIndex:"year",sorter:function(n,a){return n.year-a.year}},{title:"Task",dataIndex:"task",filters:[{text:"Object Detection",value:"Object Detection"},{text:"Semantic Segmentation",value:"Semantic Segmentation"}],onFilter:function(n,a){return a.task.includes(n)},filterSearch:!0,width:"10%",render:function(n,a){var d=n.toString().split("|");console.log(d);var t=[];return d.map(function(c){c=c.trim();var s="";switch(c){case"Detection":s="#1890ff";break;case"Segmentation":s="#fa541c";break;case"Tracking":s="#fa8c16";break;case"Multi-Task":s="#13c2c2";break;case"Planning":s="#52c41a";break;case"Prediction":s="#f5222d";break;case"Object Classification":s="#eb2f96";break;default:s="blue-inverse"}t.push((0,e.jsx)(A.Z,{color:s,children:c},c))}),t}},{title:"Dataset",dataIndex:"dataset",filters:[{text:"nuScenes",value:"nuScenes"}],onFilter:function(n,a){return a.dataset.includes(n)},filterSearch:!0,render:function(n,a){var d=n.toString().split("|"),t=[];return d.map(function(c){t.push((0,e.jsxs)("div",{children:[(0,e.jsx)("span",{children:c}),(0,e.jsx)("br",{})]}))}),t}},{title:"Conference/Journal",dataIndex:"conference_journal"},{title:"Link",dataIndex:"source_code",render:function(n,a){return console.log(a),a.source_code!=""&&a.source_code!="-"?(0,e.jsxs)("div",{children:[(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"}),"\xA0\xA0",(0,e.jsx)("a",{target:"_blank",href:a.source_code,children:"Code"})]}):(0,e.jsx)("div",{children:(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"})})}}],g=[{key:"1",name:"Experiments with mmWave Automotive Radar Test-bed",short_name:"-",year:2019,task:["Detection"],dataset:[],conference_journal:"RA ACSSC",paper_link:"https://ieeexplore.ieee.org/abstract/document/9048939",source_code:"-"},{key:"2",name:"Vehicle Detection With Automotive Radar Using Deep Learning on Range-Azimuth-Doppler Tensors",short_name:"-",year:2019,task:["Detection"],dataset:[],conference_journal:"RAD ICCVW",paper_link:"https://ieeexplore.ieee.org/document/9022248",source_code:"-"},{key:"3",name:"Probabilistic oriented object detection in automotive radar",short_name:"-",year:2020,task:["Detection"],dataset:[],conference_journal:"RA CVPRW",paper_link:"https://ieeexplore.ieee.org/document/9150751",source_code:"-"},{key:"4",name:"RODNet: Radar Object Detection Using Cross-Modal 
Supervision",short_name:"RODNet",year:2020,task:["Detection"],dataset:["CRUW"],conference_journal:"RA WACV",paper_link:"https://openaccess.thecvf.com/content/WACV2021/papers/Wang_RODNet_Radar_Object_Detection_Using_Cross-Modal_Supervision_WACV_2021_paper.pdf",source_code:"-"},{key:"5",name:"RODNet: A Real-Time Radar Object Detection Network Cross-Supervised by Camera-Radar Fused Object 3D Localization",short_name:"RODNet",year:2020,task:["Detection"],dataset:["CRUW"],conference_journal:"RA JSTSP",paper_link:"https://ieeexplore.ieee.org/document/9353210",source_code:"-"},{key:"6",name:"Range-Doppler Detection in Automotive Radar with Deep Learning",short_name:"-",year:2020,task:["Detection"],dataset:[],conference_journal:"RD IJCNN",paper_link:"https://ieeexplore.ieee.org/document/9207080",source_code:"-"},{key:"7",name:"RAMP-CNN: A Novel Neural Network for Enhanced Automotive Radar Object Recognition",short_name:"RAMP-CNN",year:2020,task:["Detection"],dataset:[],conference_journal:"RAD IEEE Sensors",paper_link:"https://ieeexplore.ieee.org/abstract/document/9249018",source_code:"-"},{key:"8",name:"CNN Based Road User Detection Using the 3D Radar Cube",short_name:"-",year:2020,task:["Detection"],dataset:[],conference_journal:"RAD RAL",paper_link:"https://ieeexplore.ieee.org/abstract/document/8962258",source_code:"-"},{key:"9",name:"Graph Convolutional Networks for 3D Object Detection on Radar Data",short_name:"GTR-Net",year:2021,task:["Detection"],dataset:[],conference_journal:"RAD ICCV Workshop",paper_link:"https://openaccess.thecvf.com/content/ICCV2021W/AVVision/html/Meyer_Graph_Convolutional_Networks_for_3D_Object_Detection_on_Radar_Data_ICCVW_2021_paper.html?ref=https://githubhelp.com",source_code:"-"},{key:"10",name:"RADDet: Range-Azimuth-Doppler based Radar Object Detection for Dynamic Road Users",short_name:"RADDet",year:2021,task:["Detection"],dataset:["RADDet"],conference_journal:"RAD CRV",paper_link:"https://openaccess.thecvf.com/content/ICCV2021W/AVVision/html/Meyer_Graph_Convolutional_Networks_for_3D_Object_Detection_on_Radar_Data_ICCVW_2021_paper.html?ref=https://githubhelp.com",source_code:"https://github.com/ZhangAoCanada/RADDet"},{key:"11",name:"DAROD: A Deep Automotive Radar Object Detector on Range-Doppler maps",short_name:"DAROD",year:2022,task:["Detection"],dataset:["CARRADA RADDet"],conference_journal:"RD IV",paper_link:"https://ieeexplore.ieee.org/document/9827281",source_code:"-"},{key:"12",name:"K-Radar: 4D Radar Object Detection for Autonomous Driving in Various Weather Conditions",short_name:"K-Radar",year:2022,task:["Detection"],dataset:["K-Radar"],conference_journal:"RADE NeurIPS",paper_link:"https://proceedings.neurips.cc/paper_files/paper/2022/hash/185fdf627eaae2abab36205dcd19b817-Abstract-Datasets_and_Benchmarks.html",source_code:"https://github.com/kaist-avelab/k-radar"},{key:"13",name:"Enhanced K-Radar: Optimal Density Reduction to Improve Detection Performance and Accessibility of 4D Radar Tensor-based Object Detection",short_name:"Enhanced K-Radar",year:2023,task:["Detection"],dataset:["K-Radar"],conference_journal:"RADE arXiv",paper_link:"https://arxiv.org/abs/2303.06342",source_code:"-"},{key:"14",name:"RSS-Net: Weakly-supervised multi-class semantic segmentation with FMCW radar",short_name:"RSS-Net",year:2020,task:["Segmentation"],dataset:[],conference_journal:"RAD IV",paper_link:"https://ieeexplore.ieee.org/abstract/document/9304674",source_code:"-"},{key:"15",name:"Deep Open Space Segmentation using Automotive 
Radar",short_name:"-",year:2020,task:["Segmentation"],dataset:[],conference_journal:"RAD ICMIM",paper_link:"https://ieeexplore.ieee.org/abstract/document/9299052",source_code:"-"},{key:"16",name:"PolarNet: Accelerated Deep Open Space Segmentation using Automotive Radar in Polar Domain",short_name:"PolarNet",year:2021,task:["Segmentation"],dataset:[],conference_journal:"RAD VEHITS",paper_link:"https://arxiv.org/abs/2103.03387",source_code:"-"},{key:"17",name:"Multi-view Radar Semantic Segmentation",short_name:"-",year:2021,task:["Segmentation"],dataset:[],conference_journal:"RAD ICCV",paper_link:"https://openaccess.thecvf.com/content/ICCV2021/html/Ouaknine_Multi-View_Radar_Semantic_Segmentation_ICCV_2021_paper.html",source_code:"https://github.com/valeoai/MVRSS"},{key:"18",name:"Raw High-Definition Radar for Multi-Task Learning",short_name:"FFT-RadNet",year:2022,task:["Multi-Task"],dataset:["RADIal"],conference_journal:"CVPR",paper_link:"https://openaccess.thecvf.com/content/CVPR2022/html/Rebut_Raw_High-Definition_Radar_for_Multi-Task_Learning_CVPR_2022_paper.html",source_code:"-"},{key:"19",name:"Cross-Modal Supervision-Based Multitask Learning With Automotive Radar Raw Data",short_name:"-",year:2023,task:["Multi-Task"],dataset:["RADIal"],conference_journal:"RD TIV",paper_link:"https://ieeexplore.ieee.org/abstract/document/10008067",source_code:"-"}],f=function(n,a,d,t){console.log("params",n,a,d,t)};return(0,e.jsxs)("div",o()(o()(o()({},i),u.wrapper),{},{id:"methods",children:[(0,e.jsx)("div",{className:"title-wrapper",children:(0,e.jsx)("h2",{name:"title",className:"title-h1",children:"Radar Tensor Methods"})}),(0,e.jsx)(x.Z,{bordered:!0,scroll:{x:"200px"},columns:h,dataSource:g,onChange:f})]}))}}]),r}(k.PureComponent),Ae=je,Me=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=[{title:"Id",dataIndex:"key",width:"10px"},{title:"Name",dataIndex:"name"},{title:"Short Name",dataIndex:"short_name"},{title:"Year",dataIndex:"year",sorter:function(n,a){return n.year-a.year}},{title:"Task",dataIndex:"task",filters:[{text:"Object Detection",value:"Object Detection"},{text:"Semantic Segmentation",value:"Semantic Segmentation"}],onFilter:function(n,a){return a.task.includes(n)},filterSearch:!0,width:"10%",render:function(n,a){var d=n.toString().split("|");console.log(d);var t=[];return d.map(function(c){c=c.trim();var s="";switch(c){case"Detection":s="#1890ff";break;case"Segmentation":s="#fa541c";break;case"Tracking":s="#fa8c16";break;case"Odometry":s="#13c2c2";break;case"Planning":s="#52c41a";break;case"Prediction":s="#f5222d";break;case"Classification":s="#eb2f96";break;default:s="blue-inverse"}t.push((0,e.jsx)(A.Z,{color:s,children:c},c))}),t}},{title:"Dataset",dataIndex:"dataset",filters:[{text:"nuScenes",value:"nuScenes"}],onFilter:function(n,a){return a.dataset.includes(n)},filterSearch:!0,render:function(n,a){var d=n.toString().split("|"),t=[];return d.map(function(c){t.push((0,e.jsxs)("div",{children:[(0,e.jsx)("span",{children:c}),(0,e.jsx)("br",{})]}))}),t}},{title:"Conference/Journal",dataIndex:"conference_journal"},{title:"Link",dataIndex:"source_code",render:function(n,a){return 
console.log(a),a.source_code!=""&&a.source_code!="-"?(0,e.jsxs)("div",{children:[(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"}),"\xA0\xA0",(0,e.jsx)("a",{target:"_blank",href:a.source_code,children:"Code"})]}):(0,e.jsx)("div",{children:(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"})})}}],g=[{key:"1",name:"Comparison of random forest and long short-term memory network performances in classification tasks using radar",short_name:"-",year:2017,task:["Classification"],dataset:[],conference_journal:"SDF",paper_link:"https://ieeexplore.ieee.org/abstract/document/8126350",source_code:"-"},{key:"2",name:"Radar-based Feature Design and Multiclass Classification for Road User Recognition",short_name:"-",year:2018,task:["Classification"],dataset:[],conference_journal:"IV",paper_link:"https://ieeexplore.ieee.org/abstract/document/8500607",source_code:"-"},{key:"3",name:"Off-the-shelf sensor vs. experimental radar - How much resolution is necessary in automotive radar classification?",short_name:"-",year:2020,task:["Classification"],dataset:[],conference_journal:"FUSION",paper_link:"https://ieeexplore.ieee.org/abstract/document/9190338",source_code:"-"},{key:"4",name:"Radar-PointGNN: Graph Based Object Recognition for Unstructured Radar Point-cloud Data",short_name:"Radar-PointGNN",year:2022,task:["Classification"],dataset:[],conference_journal:"RadarConf",paper_link:"https://ieeexplore.ieee.org/abstract/document/9455172",source_code:"-"},{key:"5",name:"2D Car Detection in Radar Data with PointNets",short_name:"-",year:2019,task:["Detection"],dataset:[],conference_journal:"ITSC",paper_link:"https://ieeexplore.ieee.org/abstract/document/8917000",source_code:"-"},{key:"6",name:"Detection and Tracking on Automotive Radar Data with Deep Learning",short_name:"-",year:2020,task:["Detection"],dataset:[],conference_journal:"FUSION",paper_link:"https://ieeexplore.ieee.org/abstract/document/9190261",source_code:"-"},{key:"7",name:"Seeing Around Street Corners: Non-Line-of-Sight Detection and Tracking In-the-Wild Using Doppler Radar",short_name:"-",year:2020,task:["Detection"],dataset:[],conference_journal:"CVPR",paper_link:"https://openaccess.thecvf.com/content_CVPR_2020/html/Scheiner_Seeing_Around_Street_Corners_Non-Line-of-Sight_Detection_and_Tracking_In-the-Wild_Using_CVPR_2020_paper.html",source_code:"-"},{key:"8",name:"RPFA-Net: a 4D RaDAR Pillar Feature Attention Network for 3D Object Detection",short_name:"RPFA-Net",year:2021,task:["Detection"],dataset:[],conference_journal:"ITSC",paper_link:"https://ieeexplore.ieee.org/abstract/document/9564754",source_code:"https://github.com/adept-thu/RPFA-Net"},{key:"9",name:"Comparison of Different Approaches for Identification of Radar Ghost Detections in Automotive Scenarios",short_name:"-",year:2021,task:["Detection"],dataset:[],conference_journal:"RadarConf",paper_link:"https://ieeexplore.ieee.org/document/9454980",source_code:"-"},{key:"10",name:"Contrastive Learning for Automotive mmWave Radar Detection Points Based Instance Segmentation",short_name:"-",year:2022,task:["Detection"],dataset:[],conference_journal:"ITSC",paper_link:"https://ieeexplore.ieee.org/document/9922540",source_code:"-"},{key:"11",name:"3D Object Detection for Multiframe 4-D Automotive Millimeter-Wave Radar Point Cloud",short_name:"-",year:2023,task:["Detection"],dataset:["TJ4DRadSet"],conference_journal:"IEEE Sensors",paper_link:"https://ieeexplore.ieee.org/abstract/document/9944629",source_code:"-"},{key:"12",name:"SMURF: Spatial 
Multi-Representation Fusion for 3D Object Detection with 4D Imaging Radar",short_name:"SMURF",year:2023,task:["Detection"],dataset:["VoD","TJ4DRadSet"],conference_journal:"TIV",paper_link:"https://ieeexplore.ieee.org/abstract/document/10274127",source_code:"-"},{key:"13",name:"MVFAN: Multi-View Feature Assisted Network for 4D Radar Object Detection",short_name:"MVFAN",year:2023,task:["Detection"],dataset:["VoD","Astyx"],conference_journal:"ICONIP",paper_link:"https://arxiv.org/abs/2310.16389",source_code:"-"},{key:"14",name:"Semantic Segmentation on Radar Point Clouds",short_name:"-",year:2018,task:["Segmentation"],dataset:[],conference_journal:"FUSION",paper_link:"https://ieeexplore.ieee.org/document/8455344",source_code:"-"},{key:"15",name:"Supervised Clustering for Radar Applications: On the Way to Radar Instance Segmentation",short_name:"-",year:2018,task:["Segmentation"],dataset:[],conference_journal:"ICMIM",paper_link:"https://ieeexplore.ieee.org/abstract/document/8917000",source_code:"-"},{key:"16",name:"2D Car Detection in Radar Data with PointNets",short_name:"-",year:2019,task:["Segmentation"],dataset:[],conference_journal:"ITSC",paper_link:"https://ieeexplore.ieee.org/abstract/document/8917000",source_code:"-"},{key:"17",name:"RSS-Net: Weakly-Supervised Multi-Class Semantic Segmentation with FMCW Radar",short_name:"RSS-Net",year:2020,task:["Segmentation"],dataset:[],conference_journal:"IV",paper_link:"https://ieeexplore.ieee.org/abstract/document/9304674",source_code:"-"},{key:"18",name:"Panoptic Segmentation for Automotive Radar Point Cloud",short_name:"-",year:2022,task:["Segmentation"],dataset:[],conference_journal:"RadarConf",paper_link:"https://ieeexplore.ieee.org/document/9764218",source_code:"-"},{key:"19",name:"Deep Instance Segmentation With Automotive Radar Detection Points",short_name:"-",year:2022,task:["Segmentation"],dataset:["RadarScenes"],conference_journal:"TIV",paper_link:"https://ieeexplore.ieee.org/abstract/document/9762032",source_code:"-"},{key:"20",name:"Detection and Tracking on Automotive Radar Data with Deep Learning",short_name:"-",year:2020,task:["Tracking"],dataset:[],conference_journal:"FUSION",paper_link:"https://ieeexplore.ieee.org/abstract/document/9190261",source_code:"-"},{key:"21",name:"Which Framework is Suitable for Online 3D Multi-Object Tracking for Autonomous Driving with Automotive 4D Imaging Radar?",short_name:"-",year:2023,task:["Tracking"],dataset:[],conference_journal:"arXiv",paper_link:"https://arxiv.org/abs/2309.06036",source_code:"-"},{key:"22",name:"Efficient Deep-Learning 4D Automotive Radar Odometry Method",short_name:"-",year:2023,task:["Odometry"],dataset:[],conference_journal:"TIV",paper_link:"https://ieeexplore.ieee.org/abstract/document/10237296",source_code:"-"},{key:"23",name:"DRIO: Robust Radar-Inertial Odometry in Dynamic Environments",short_name:"DRIO",year:2023,task:["Odometry"],dataset:[],conference_journal:"RAL",paper_link:"https://ieeexplore.ieee.org/abstract/document/10207713",source_code:"-"},{key:"24",name:"Person Reidentification Based on Automotive Radar Point Clouds",short_name:"-",year:2021,task:["Gait Recognition"],dataset:[],conference_journal:"TGRS",paper_link:"https://ieeexplore.ieee.org/document/9420713",source_code:"-"},{key:"25",name:"Gait Recognition for Co-Existing Multiple People Using Millimeter Wave Sensing",short_name:"-",year:2020,task:["Gait 
Recognition"],dataset:[],conference_journal:"AAAI",paper_link:"https://ojs.aaai.org/index.php/AAAI/article/view/5430",source_code:"-"}],f=function(n,a,d,t){console.log("params",n,a,d,t)};return(0,e.jsxs)("div",o()(o()(o()({},i),u.wrapper),{},{id:"methods",children:[(0,e.jsx)("div",{className:"title-wrapper",children:(0,e.jsx)("h2",{name:"title",className:"title-h1",children:"Point Cloud Methods"})}),(0,e.jsx)(x.Z,{bordered:!0,scroll:{x:"200px"},columns:h,dataSource:g,onChange:f})]}))}}]),r}(k.PureComponent),Ie=Me,Ne=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=[{title:"Id",dataIndex:"key",width:"10px"},{title:"Name",dataIndex:"name"},{title:"Short Name",dataIndex:"short_name"},{title:"Year",dataIndex:"year",sorter:function(n,a){return n.year-a.year}},{title:"Task",dataIndex:"task",filters:[{text:"Object Detection",value:"Object Detection"},{text:"Semantic Segmentation",value:"Semantic Segmentation"}],onFilter:function(n,a){return a.task.includes(n)},filterSearch:!0,width:"10%",render:function(n,a){var d=n.toString().split("|");console.log(d);var t=[];return d.map(function(c){c=c.trim();var s="";switch(c){case"Detection":s="#1890ff";break;case"Segmentation":s="#fa541c";break;case"Tracking":s="#fa8c16";break;case"Localization":s="#13c2c2";break;case"Planning":s="#52c41a";break;case"Prediction":s="#f5222d";break;case"Object Classification":s="#eb2f96";break;default:s="blue-inverse"}t.push((0,e.jsx)(A.Z,{color:s,children:c},c))}),t}},{title:"Dataset",dataIndex:"dataset",filters:[{text:"nuScenes",value:"nuScenes"}],onFilter:function(n,a){return a.dataset.includes(n)},filterSearch:!0,render:function(n,a){var d=n.toString().split("|"),t=[];return d.map(function(c){t.push((0,e.jsxs)("div",{children:[(0,e.jsx)("span",{children:c}),(0,e.jsx)("br",{})]}))}),t}},{title:"Conference/Journal",dataIndex:"conference_journal"},{title:"Link",dataIndex:"source_code",render:function(n,a){return console.log(a),a.source_code!=""&&a.source_code!="-"?(0,e.jsxs)("div",{children:[(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"}),"\xA0\xA0",(0,e.jsx)("a",{target:"_blank",href:a.source_code,children:"Code"})]}):(0,e.jsx)("div",{children:(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"})})}}],g=[{key:"1",name:"Automotive Radar Gridmap Representations",short_name:"-",year:2015,task:["Detection"],dataset:[],conference_journal:"ICMIM",paper_link:"https://ieeexplore.ieee.org/abstract/document/7117922",source_code:"-"},{key:"2",name:"Detection of Arbitrarily Rotated Parked Cars Based on Radar Sensors",short_name:"-",year:2015,task:["Detection"],dataset:[],conference_journal:"IRS",paper_link:"https://ieeexplore.ieee.org/abstract/document/7226281",source_code:"-"},{key:"3",name:"3D Occupancy Grid Mapping Using Statistical Radar Models",short_name:"-",year:2016,task:["Detection"],dataset:[],conference_journal:"IV",paper_link:"https://ieeexplore.ieee.org/abstract/document/7535495",source_code:"-"},{key:"4",name:"Semantic Radar Grids",short_name:"-",year:2017,task:["Detection"],dataset:[],conference_journal:"IV",paper_link:"https://ieeexplore.ieee.org/abstract/document/7995871",source_code:"-"},{key:"5",name:"Adaptions for Automotive Radar Based Occupancy 
Gridmaps",short_name:"-",year:2018,task:["Detection"],dataset:[],conference_journal:"ICMIM",paper_link:"https://ieeexplore.ieee.org/abstract/document/8443484",source_code:"-"},{key:"6",name:"High Resolution Radar-based Occupancy Grid Mapping and Free Space Detection",short_name:"-",year:2018,task:["Detection"],dataset:[],conference_journal:"VEHITS",paper_link:"https://pdfs.semanticscholar.org/d888/6334e15acebe688f993f45da7ba7bde79eff.pdf",source_code:"-"},{key:"7",name:"Semantic Segmentation on Automotive Radar Maps",short_name:"-",year:2019,task:["Detection"],dataset:[],conference_journal:"IV",paper_link:"https://ieeexplore.ieee.org/abstract/document/8813808",source_code:"-"},{key:"8",name:"Occupancy Grids Generation Using Deep Radar Network for Autonomous Driving",short_name:"-",year:2019,task:["Detection"],dataset:[],conference_journal:"ITSC",paper_link:"https://ieeexplore.ieee.org/abstract/document/8916897",source_code:"-"},{key:"9",name:"Semantic Segmentation on 3D Occupancy Grids for Automotive Radar",short_name:"-",year:2020,task:["Detection"],dataset:[],conference_journal:"IEEE ACCESS",paper_link:"https://ieeexplore.ieee.org/abstract/document/9229096",source_code:"-"},{key:"10",name:"NVRadarNet: Real-Time Radar Obstacle and Free Space Detection for Autonomous Driving",short_name:"NVRadarNet",year:2023,task:["Detection"],dataset:[],conference_journal:"RA ICRA",paper_link:"https://arxiv.org/abs/2209.14499",source_code:"-"},{key:"11",name:"Road Scene Understanding by Occupancy Grid Learning from Sparse Radar Clusters using Semantic Segmentation",short_name:"-",year:2019,task:["Segmentation"],dataset:[],conference_journal:"ICCV",paper_link:"https://openaccess.thecvf.com/content_ICCVW_2019/html/CVRSUAD/Sless_Road_Scene_Understanding_by_Occupancy_Grid_Learning_from_Sparse_Radar_ICCVW_2019_paper.html",source_code:"-"},{key:"12",name:"CNN based road course estimation on automotive radar data with various gridmaps",short_name:"-",year:2020,task:["Segmentation"],dataset:[],conference_journal:"ICMIM",paper_link:"https://ieeexplore.ieee.org/document/9299086",source_code:"-"},{key:"13",name:"Scene Understanding With Automotive Radar",short_name:"-",year:2020,task:["Segmentation"],dataset:[],conference_journal:"TIV",paper_link:"https://ieeexplore.ieee.org/abstract/document/8911477",source_code:"-"},{key:"14",name:"Semantic Segmentation-Based Occupancy Grid Map Learning With Automotive Radar Raw Data",short_name:"-",year:2023,task:["Segmentation"],dataset:["RADIal"],conference_journal:"TIV",paper_link:"https://ieeexplore.ieee.org/abstract/document/10273590",source_code:"-"}],f=function(n,a,d,t){console.log("params",n,a,d,t)};return(0,e.jsxs)("div",o()(o()(o()({},i),u.wrapper),{},{id:"methods",children:[(0,e.jsx)("div",{className:"title-wrapper",children:(0,e.jsx)("h2",{name:"title",className:"title-h1",children:"Grid Map Methods"})}),(0,e.jsx)(x.Z,{bordered:!0,scroll:{x:"200px"},columns:h,dataSource:g,onChange:f})]}))}}]),r}(k.PureComponent),Pe=Ne,we=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=[{title:"Id",dataIndex:"key",width:"10px"},{title:"Name",dataIndex:"name"},{title:"Short Name",dataIndex:"short_name"},{title:"Year",dataIndex:"year",sorter:function(n,a){return n.year-a.year}},{title:"Task",dataIndex:"task",filters:[{text:"Object Detection",value:"Object Detection"},{text:"Semantic 
Segmentation",value:"Semantic Segmentation"}],onFilter:function(n,a){return a.task.includes(n)},filterSearch:!0,width:"10%",render:function(n,a){var d=n.toString().split("|");console.log(d);var t=[];return d.map(function(c){c=c.trim();var s="";switch(c){case"Object Detection":s="#1890ff";break;case"Semantic Segmentation":s="#fa541c";break;case"Object Tracking":s="#fa8c16";break;case"Localization":s="#13c2c2";break;case"Planning":s="#52c41a";break;case"Prediction":s="#f5222d";break;case"Object Classification":s="#eb2f96";break;default:s="blue-inverse"}t.push((0,e.jsx)(A.Z,{color:s,children:c},c))}),t}},{title:"Dataset",dataIndex:"dataset",filters:[{text:"nuScenes",value:"nuScenes"}],onFilter:function(n,a){return a.dataset.includes(n)},filterSearch:!0,render:function(n,a){var d=n.toString().split("|"),t=[];return d.map(function(c){t.push((0,e.jsxs)("div",{children:[(0,e.jsx)("span",{children:c}),(0,e.jsx)("br",{})]}))}),t}},{title:"Conference/Journal",dataIndex:"conference_journal"},{title:"Link",dataIndex:"source_code",render:function(n,a){return console.log(a),a.source_code!=""&&a.source_code!="-"?(0,e.jsxs)("div",{children:[(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"}),"\xA0\xA0",(0,e.jsx)("a",{target:"_blank",href:a.source_code,children:"Code"})]}):(0,e.jsx)("div",{children:(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"})})}}],g=[{key:"1",name:"Human Detection and Activity Classification Based on Micro-Doppler Signatures Using Deep Convolutional Neural Networks",short_name:"-",year:2016,task:["Motion (Gait/Gestures/Activity) Classification"],dataset:[],conference_journal:"IEEE Geoscience and Remote Sensing Letters",paper_link:"https://ieeexplore.ieee.org/abstract/document/7314905",source_code:"-"},{key:"2",name:"New Analysis of Radar Micro-Doppler Gait Signatures for Rehabilitation and Assisted Living",short_name:"-",year:2017,task:["Motion (Gait/Gestures/Activity) Classification"],dataset:[],conference_journal:"ICASSP",paper_link:"https://ieeexplore.ieee.org/abstract/document/7952908",source_code:"-"},{key:"3",name:"Human Motion Classification with Micro-Doppler Radar and Bayesian-Optimized Convolutional Neural Networks",short_name:"-",year:2018,task:["Motion (Gait/Gestures/Activity) Classification"],dataset:[],conference_journal:"ICASSP",paper_link:"https://ieeexplore.ieee.org/abstract/document/8461847",source_code:"-"},{key:"4",name:"Radar-Based Human-Motion Recognition With Deep Learning: Promising Applications for Indoor Monitoring",short_name:"-",year:2018,task:["Motion (Gait/Gestures/Activity) Classification"],dataset:[],conference_journal:"IEEE Signal Processing Magazine",paper_link:"https://ieeexplore.ieee.org/abstract/document/8746862",source_code:"-"},{key:"5",name:"Radar-Based Human Gait Recognition Using Dual-Channel Deep Convolutional Neural Network",short_name:"-",year:2019,task:["Motion (Gait/Gestures/Activity) Classification"],dataset:[],conference_journal:"TGRS",paper_link:"https://ieeexplore.ieee.org/abstract/document/8789686",source_code:"-"},{key:"6",name:"Experiments with mmWave Automotive Radar Test-bed",short_name:"-",year:2019,task:["Motion (Gait/Gestures/Activity) Classification"],dataset:[],conference_journal:"ACSSC",paper_link:"https://ieeexplore.ieee.org/abstract/document/9048939",source_code:"-"},{key:"7",name:"Attention-Based Dual-Stream Vision Transformer for Radar Gait Recognition",short_name:"-",year:2022,task:["Motion (Gait/Gestures/Activity) 
Classification"],dataset:[],conference_journal:"ICASSP",paper_link:"https://ieeexplore.ieee.org/abstract/document/9746565",source_code:"-"}],f=function(n,a,d,t){console.log("params",n,a,d,t)};return(0,e.jsxs)("div",o()(o()(o()({},i),u.wrapper),{},{id:"methods",children:[(0,e.jsx)("div",{className:"title-wrapper",children:(0,e.jsx)("h2",{name:"title",className:"title-h1",children:"Micro-Doppler Signature Methods"})}),(0,e.jsx)(x.Z,{bordered:!0,scroll:{x:"200px"},columns:h,dataSource:g,onChange:f})]}))}}]),r}(k.PureComponent),Oe=we,Te=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=["S. Chadwick, W. Maddetn, and P. Newman, \u201CDistant vehicle detection using radar and vision,\u201D Proceedings - IEEE International Conference on Robotics and Automation, vol. 2019-May, pp. 8311\u20148317, 2019.","M. Meyer and G. Kuschk, \u201CAstyx: Automotive radar dataset for deep learning based 3D object detection,\u201D EuRAD 2019 - 2019 16th European Radar Conference, pp. 129\u2014132, 2019.","M. Bijelic, T. Gruber, F. Mannan, F. Kraus, W. Ritter, K. Dietmayer, and F. Heide, \u201CSeeing Through Fog Without Seeing Fog: Deep Multimodal Sensor Fusion in Unseen Adverse Weather,\u201D 2020 IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR), vol. 00, pp. 11 679\u201311 689, 2020.","A. Ouaknine, A. Newson, J. Rebut, F. Tupin, and P. Perez, \u201CCARRADA dataset: Camera and automotive radar with range-Angle-doppler annotations,\u201D arXiv, 2020.","J. Guan, S. Madani, S. Jog, S. Gupta, and H. Hassanieh, \u201CThrough Fog High-Resolution Imaging Using Millimeter Wave Radar,\u201D ser. Proceedings of the IEEE Computer Society Conference on Computer Vision and Pattern Recognition, 2020, pp. 11 461\u201411 470.","M. Mostajabi, C. M. Wang, D. Ranjan, and G. Hsyu, \u201CHigh resolution radar dataset for semi-supervised learning of dynamic objects,\u201D IEEE Computer Society Conference on Computer Vision and Pattern Recognition Workshops, vol. 2020-June, pp. 450\u2014457, 2020.","M. Sheeny, E. De Pellegrin, S. Mukherjee, A. Ahrabian, S. Wang, and A. Wallace, \u201CRadiate: A radar dataset for automotive perception in bad weather,\u201D in 2021 IEEE International Conference on Robotics and Automation (ICRA). IEEE, 2021, pp. 1\u20137.","X. Weng, Y. Man, D. Cheng, J. Park, M. O\u2019Toole, and K. Kitani, \u201CAll-In-One Drive: A Large-Scale Comprehensive Perception Dataset with High-Density Long-Range Point Clouds.\u201D","Y. Wang, G. Wang, H.-M. Hsu, H. Liu, and J.-N. Hwang, \u201CRethinking of Radar\u2019s Role: A Camera-Radar Dataset and Systematic Annotator via Coordinate Alignment,\u201D in CVPRW, 2021.","T.-Y. Lim, S. A. Markowitz, and M. N. Do, \u201CRaDICaL: A Synchronized FMCW Radar, Depth, IMU and RGB Camera Data Dataset with Low-Level FMCW Radar Signals.\u201D","O. Schumann, M. Hahn, N. Scheiner, F. Weishaupt, J. F. Tilly, J. Dickmann, and C. Wohler, \u201CRadarScenes: A Real-World Radar Point Cloud Data Set for Automotive Applications,\u201D 2021. [Online]. Available: http://arxiv.org/abs/2104.02493","A. Zhang, F. E. Nowruzi, and R. Laganiere, \u201CRADDet: Range-Azimuth-Doppler based Radar Object Detection for Dynamic Road Users,\u201D 2021 18th Conference on Robots and Vision (CRV), vol. 00, pp. 95\u2013102, 2021.","Y. Cheng, J. Zhu, M. Jiang, J. Fu, C. Pang1, P. Wang1, K. Sankaran3, O. 
Onabola, Y. Liu, D. Liu, and Y. Bengio, \u201CFloW: A Dataset and Benchmark for Floating Waste Detection in Inland Waters,\u201D ser. ICCV, 2021.","J. Rebut, A. Ouaknine, W. Malik, and P. P\u00e9rez, \u201CRaw high-definition radar for multi-task learning,\u201D in Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition, 2022, pp. 17021\u201317030.","A. Palffy, E. Pool, S. Baratam, J. Kooij, and D. Gavrila, \u201CMulti-class Road User Detection with 3+1D Radar in the View-of-Delft Dataset,\u201D IEEE Robotics and Automation Letters, vol. PP, no. 99, pp. 1\u20131, 2022.","K. Burnett, D. J. Yoon, Y. Wu, A. Z. Li, H. Zhang, S. Lu, J. Qian, W.-K. Tseng, A. Lambert, K. Y. K. Leung, A. P. Schoellig, and T. D. Barfoot, \u201CBoreas: A Multi-Season Autonomous Driving Dataset,\u201D arXiv, 2022.","D.-H. Paek, S.-H. Kong, and K. T. Wijaya, \u201CK-Radar: 4D Radar Object Detection Dataset and Benchmark for Autonomous Driving in Various Weather Conditions,\u201D arXiv, 2022.","T. Matuszka, I. Barton, \u00c1. Butykai, P. Hajas, D. Kiss, D. Kov\u00e1cs, S. Kuns\u00e1gi-M\u00e1t\u00e9, P. Lengyel, G. N\u00e9meth, L. Pet\u0151 et al., \u201CaiMotive dataset: A multimodal dataset for robust autonomous driving with long-range perception,\u201D arXiv preprint arXiv:2211.09445, 2022.","S. Yao, R. Guan, Z. Wu et al., \u201CWaterScenes: A multi-task 4D radar-camera fusion dataset and benchmark for autonomous driving on water surfaces,\u201D arXiv preprint arXiv:2307.06505, 2023."],g=["S. Chadwick, W. Maddern, and P. Newman, \u201CDistant vehicle detection using radar and vision,\u201D in 2019 International Conference on Robotics and Automation (ICRA). IEEE, 2019, pp. 8311\u20138317.","R. Nabati and H. Qi, \u201CRRPN: Radar region proposal network for object detection in autonomous vehicles,\u201D in 2019 IEEE International Conference on Image Processing (ICIP). IEEE, 2019, pp. 3093\u20133097.","H. Jha, V. Lodhi, and D. Chakravarty, \u201CObject Detection and Identification Using Vision and Radar Data Fusion System for Ground-Based Navigation,\u201D 2019 6th International Conference on Signal Processing and Integrated Networks (SPIN), vol. 00, pp. 590\u2013593, 2019.","V. Lekic and Z. Babic, \u201CAutomotive radar and camera fusion using generative adversarial networks,\u201D Computer Vision and Image Understanding, vol. 184, pp. 1\u20138, 2019.","M. Meyer and G. Kuschk, \u201CDeep learning based 3D object detection for automotive radar and camera,\u201D in 2019 16th European Radar Conference (EuRAD). IEEE, 2019, pp. 133\u2013136.","V. John and S. Mita, \u201CRVNet: Deep sensor fusion of monocular camera and radar for image-based obstacle detection in challenging environments,\u201D in Image and Video Technology: 9th Pacific-Rim Symposium, PSIVT 2019, Sydney, NSW, Australia, November 18\u201322, 2019, Proceedings 9. Springer, 2019, pp. 351\u2013364.","T.-Y. Lim, A. Ansari, B. Major, D. Fontijne, M. Hamilton, R. Gowaikar, and S. Subramanian, \u201CRadar and camera early fusion for vehicle detection in advanced driver assistance systems,\u201D in Machine learning for autonomous driving workshop at the 33rd conference on neural information processing systems, vol. 2, 2019, p. 7.","V. John, M. K. Nithilan, S. Mita, H. Tehrani, R. S. Sudheesh, and P. P. 
Lalu, \u201CSO-Net: Joint Semantic Segmentation and Obstacle Detection Using Deep Fusion of Monocular Camera and Radar,\u201D Lecture Notes in Computer Science, pp. 138\u2013148, 2020.","S. Chang, Y. Zhang, F. Zhang, X. Zhao, S. Huang, Z. Feng, and Z. Wei, \u201CSpatial attention fusion for obstacle detection using mmwave radar and vision sensor,\u201D Sensors, vol. 20, no. 4, p. 956, 2020.","F. Nobis, M. Geisslinger, M. Weber, J. Betz, and M. Lienkamp, \u201CA deep learning-based radar and camera sensor fusion architecture for object detection,\u201D arXiv, 2020.","M. Bijelic, T. Gruber, F. Mannan, F. Kraus, W. Ritter, K. Dietmayer, and F. Heide, \u201CSeeing through fog without seeing fog: Deep multimodal sensor fusion in unseen adverse weather,\u201D in Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition, 2020, pp. 11682\u201311692.","R. Yadav, A. Vierling, and K. Berns, \u201CRadar+RGB attentive fusion for robust object detection in autonomous vehicles,\u201D arXiv preprint arXiv:2008.13642, 2020.","R. Nabati and H. Qi, \u201CRadar-camera sensor fusion for joint object detection and distance estimation in autonomous vehicles,\u201D arXiv preprint arXiv:2009.08428, 2020.","K. Kowol, M. Rottmann, S. Bracke, and H. Gottschalk, \u201CYOdar: uncertainty-based sensor fusion for vehicle detection with camera and radar sensors,\u201D arXiv preprint arXiv:2010.03320, 2020.","Y. Wang, Z. Jiang, X. Gao, J.-N. Hwang, G. Xing, and H. Liu, \u201CRODNet: Radar Object Detection using Cross-Modal Supervision,\u201D 2021 IEEE Winter Conference on Applications of Computer Vision (WACV), vol. 00, pp. 504\u2013513, 2021.","X. Gao, G. Xing, S. Roy, and H. Liu, \u201CRAMP-CNN: A Novel Neural Network for Enhanced Automotive Radar Object Recognition,\u201D IEEE Sensors Journal, vol. 21, no. 4, pp. 5119\u20135132, 2021.","L.-q. Li and Y.-l. Xie, \u201CA Feature Pyramid Fusion Detection Algorithm Based on Radar and Camera Sensor,\u201D 2020 15th IEEE International Conference on Signal Processing (ICSP), vol. 1, pp. 366\u2013370, 2020.","J. Kim, Y. Kim, and D. Kum, \u201CLow-level Sensor Fusion for 3D Vehicle Detection using Radar Range-Azimuth Heatmap and Monocular Image,\u201D Lecture Notes in Computer Science, pp. 388\u2013402, 2021.","X. Dong, B. Zhuang, Y. Mao, and L. Liu, \u201CRadar Camera Fusion via Representation Learning in Autonomous Driving,\u201D 2021 IEEE/CVF Conference on Computer Vision and Pattern Recognition Workshops (CVPRW), vol. 00, pp. 1672\u20131681, 2021.","F. Nobis, E. Shafiei, P. Karle, J. Betz, and M. Lienkamp, \u201CRadar Voxel Fusion for 3D Object Detection,\u201D Applied Sciences, vol. 11, no. 12, p. 5598, 2021.","H. Cui, J. Wu, J. Zhang, G. Chowdhary, and W. R. Norris, \u201C3D Detection and Tracking for On-road Vehicles with a Monovision Camera and Dual Low-cost 4D mmWave Radars,\u201D 2021 IEEE International Intelligent Transportation Systems Conference (ITSC), vol. 00, pp. 2931\u20132937, 2021.","Y. Cheng, H. Xu, and Y. Liu, \u201CRobust Small Object Detection on the Water Surface through Fusion of Camera and Millimeter Wave Radar,\u201D ser. ICCV, 2021.","Y. Kim, J. W. Choi, and D. Kum, \u201CGRIF Net: Gated Region of Interest Fusion Network for Robust 3D Object Detection from Radar Point Cloud and Monocular Image,\u201D 2020 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS), vol. 00, pp. 10857\u201310864, 2021.","L. St\u00e4cker, P. Heidenreich, J. Rambach, and D. 
Stricker, \u201CFusion Point Pruning for Optimized 2D Object Detection with Radar-Camera Fusion,\u201D 2022 IEEE/CVF Winter Conference on Applications of Computer Vision (WACV), vol. 00, pp. 1275\u20131282, 2022.","A. W. Harley, Z. Fang, J. Li, R. Ambrus, and K. Fragkiadaki, \u201CA Simple Baseline for BEV Perception Without LiDAR,\u201D arXiv, 2022.","K. Bansal, K. Rungta, and D. Bharadia, \u201CRadSegNet: A Reliable Approach to Radar Camera Fusion,\u201D arXiv, 2022.","T. Zhou, J. Chen, Y. Shi, K. Jiang, M. Yang, and D. Yang, \u201CBridging the view disparity between radar and camera features for multi-modal fusion 3d object detection,\u201D IEEE Transactions on Intelligent Vehicles, vol. 8, no. 2, pp. 1523\u20131535, 2023.","Y. Kim, S. Kim, J. W. Choi, and D. Kum, \u201CCRAFT: Camera-Radar 3D Object Detection with Spatio-Contextual Fusion Transformer,\u201D arXiv, 2022.","F. Drews, D. Feng, F. Faion, L. Rosenbaum, M. Ulrich, and C. Gl\u00e4ser, \u201CDeepFusion: A robust and modular 3d object detector for lidars, cameras and radars,\u201D in 2022 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS). IEEE, 2022, pp. 560\u2013567.","J.-J. Hwang, H. Kretzschmar, J. Manela, S. Rafferty, N. Armstrong-Crews, T. Chen, and D. Anguelov, \u201CCramNet: Camera-radar fusion with ray-constrained cross-attention for robust 3d object detection,\u201D in Computer Vision\u2013ECCV 2022: 17th European Conference, Tel Aviv, Israel, October 23\u201327, 2022, Proceedings, Part XXXVIII. Springer, 2022, pp. 388\u2013405.","Z. Wu, G. Chen, Y. Gan, L. Wang, and J. Pu, \u201CMVFusion: Multi-view 3D object detection with semantic-aligned radar and camera fusion,\u201D arXiv preprint arXiv:2302.10511, 2023.","Y. Kim, S. Kim, J. Shin, J. W. Choi, and D. Kum, \u201CCRN: Camera radar net for accurate, robust, efficient 3d perception,\u201D arXiv preprint arXiv:2304.00670, 2023.","L. Zheng, S. Li, B. Tan et al., \u201CRCFusion: Fusing 4D Radar and Camera with Bird\u2019s-Eye View Features for 3D Object Detection,\u201D IEEE Transactions on Instrumentation and Measurement, 2023.","W. Xiong, J. Liu, T. Huang et al., \u201CLXL: LiDAR Exclusive Lean 3D Object Detection with 4D Imaging Radar and Camera Fusion,\u201D arXiv preprint arXiv:2307.00724, 2023.","R. Guan, S. Yao, X. Zhu et al., \u201CAchelous: A Fast Unified Water-surface Panoptic Perception Framework based on Fusion of Monocular Camera and 4D mmWave Radar,\u201D 
arXiv preprint arXiv:2307.07102, 2023."],f=h.map(function(n,a){return(0,e.jsxs)("p",{children:["[",a+1,"] ",n]})}),l=g.map(function(n,a){return(0,e.jsxs)("p",{children:["[",a+1+h.length,"] ",n]})});return(0,e.jsx)("div",{className:"home-page-wrapper content12-wrapper",id:"references",children:(0,e.jsxs)("div",{className:"content12",id:"citation",children:[(0,e.jsx)("h1",{name:"title",className:"title-h1",children:"Citation"}),(0,e.jsx)("div",{style:{backgroundColor:"#f3f6fa",padding:"10px"},children:(0,e.jsxs)("code",{children:["@misc{yao2023radarcamera,",(0,e.jsx)("br",{}),"\xA0\xA0\xA0\xA0","title={Radar-Camera Fusion for Object Detection and Semantic Segmentation in Autonomous Driving: A Comprehensive Review}, ",(0,e.jsx)("br",{}),"\xA0\xA0\xA0\xA0","author={Shanliang Yao and Runwei Guan and Xiaoyu Huang and Zhuoxiao Li and Xiangyu Sha and Yong Yue and Eng Gee Lim and Hyungjoon Seo and Ka Lok Man and Xiaohui Zhu and Yutao Yue},",(0,e.jsx)("br",{}),"\xA0\xA0\xA0\xA0","year={2023},",(0,e.jsx)("br",{}),"\xA0\xA0\xA0\xA0","eprint={2304.10410},",(0,e.jsx)("br",{}),"\xA0\xA0\xA0\xA0","archivePrefix={arXiv},",(0,e.jsx)("br",{}),"\xA0\xA0\xA0\xA0","primaryClass={cs.CV}",(0,e.jsx)("br",{}),"}"]})})]})})}}]),r}(k.PureComponent),Fe=Te,Ee=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;return delete i.dataSource,delete i.isMobile,(0,e.jsx)(L(),o()(o()(o()({},i),u.OverPack),{},{children:(0,e.jsx)(V.Z,o()(o()({type:"bottom",leaveReverse:!0,delay:[0,100]},u.titleWrapper),{},{children:u.titleWrapper.children.map(B)}),"page")}))}}]),r}(k.PureComponent),We=Ee,Ve=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=i||[];(function(){var g=document.createElement("script");g.src="https://hm.baidu.com/hm.js?58d144a733fcb2ea441a68157d15c700";var f=document.getElementsByTagName("script")[0];f.parentNode.insertBefore(g,f)})();var u=Object.assign({},(D()(this.props),this.props)),h=u.dataSource;return delete u.dataSource,delete u.isMobile,(0,e.jsx)("div",o()(o()(o()({},u),h.wrapper),{},{children:(0,e.jsx)(L(),o()(o()({},h.OverPack),{},{children:(0,e.jsx)(T.ZP,o()(o()({animation:{y:"+=30",opacity:0,type:"from"}},h.copyright),{},{children:h.copyright.children}),"footer")}))}))}}]),r}(k.PureComponent),Le=Ve,Ge=p.p+"static/logo.0bb42fca.png",Be={isScrollLink:!0,wrapper:{className:"header2 home-page-wrapper jrhtw9ph4a-editor_css"},page:{className:"home-page"},logo:{className:"header2-logo",children:Ge},LinkMenu:{className:"header2-menu",children:[{name:"linkNav",to:"characteristics",children:"Characteristics",className:"menu-item"},{name:"linkNav",to:"datasets",children:"Datasets",className:"menu-item"},{name:"linkNav",to:"methods",children:"Methods",className:"menu-item"},{name:"linkNav",to:"citation",children:"Citation",className:"menu-item"},{name:"linkNav",to:"https://radar-camera-fusion.github.io",children:"Radar Camera Fusion",className:"menu-item"}]},mobileMenu:{className:"header2-mobile-menu"},Menu:{children:[{name:"Banner3_0",to:"Banner3_0",children:"Home",className:"active 
menu-item"},{name:"Content8_0",to:"Content8_0",children:"\u7279\u9080\u5609\u5BBE",className:"menu-item"},{name:"Content9_0",to:"Content9_0",children:"\u4F1A\u8BAE\u65E5\u7A0B",className:"menu-item"},{name:"Content10_0",to:"Content10_0",children:"\u5927\u4F1A\u5730\u5740",className:"menu-item"},{name:"Content11_0",to:"Content11_0",children:"\u5C55\u53F0\u5C55\u793A",className:"menu-item"},{name:"Content12_0",to:"Content12_0",children:"\u7279\u522B\u9E23\u8C22",className:"menu-item"}]}},Ue={wrapper:{className:"banner3"},textWrapper:{className:"banner3-text-wrapper",children:[{name:"slogan",className:"banner3-slogan",children:"Radar Perception in Autonomous Driving: Exploring Different Data Representations"},{name:"nameEn",className:"banner3-name-en",children:"Shanliang Yao, Runwei Guan, Zitian Peng, Chenhang Xu, Yilu Shi, "},{name:"nameEn",className:"banner3-name-en",children:"Yong Yue, Eng Gee Lim, Hyungjoon Seo, Ka Lok Man, Xiaohui Zhu, Yutao Yue"},{name:"time",className:"banner3-time",children:"University of Liverpool, Xi\u2018an Jiaotong-Liverpool University, Institute of Deep Perception Technology, JITRI"},{name:"button",className:"banner3-button",children:"GitHub: https://github.com/Radar-Camera-Fusion/Awesome-Radar-Perception",type:"primary",href:"https://github.com/Radar-Camera-Fusion/Awesome-Radar-Perception",target:"_blank"}]}},He={OverPack:{className:"home-page-wrapper content13-wrapper",playScale:.3},titleWrapper:{className:"title-wrapper",children:[]}},Je={wrapper:{className:"home-page-wrapper footer0-wrapper"},OverPack:{className:"home-page footer0",playScale:.01},copyright:{className:"copyright",children:(0,e.jsxs)("span",{children:["\xA92023 ",(0,e.jsx)("a",{href:"https://github.com/Radar-Camera-Fusion",children:"Radar-Camera-Fusion"})," All Rights Reserved"]})}},U={wrapper:{className:"home-page-wrapper"},OverPack:{className:"home-page",playScale:.05},copyright:{className:"copyright",children:(0,e.jsxs)("span",{children:["\xA92018 ",(0,e.jsx)("a",{href:"https://motion.ant.design",children:"Ant Motion"})," All Rights Reserved"]})}},N={wrapper:{className:"home-page-wrapper"},OverPack:{className:"home-page",playScale:.05},copyright:{className:"copyright",children:(0,e.jsxs)("span",{children:["\xA92018 ",(0,e.jsx)("a",{href:"https://motion.ant.design",children:"Ant Motion"})," All Rights Reserved"]})}},ze={wrapper:{className:"home-page-wrapper content12-wrapper"},OverPack:{className:"home-page content12",playScale:.05}},ta={wrapper:{className:"home-page-wrapper pricing2-wrapper"},page:{className:"home-page pricing2"},OverPack:{playScale:.3,className:"pricing2-content-wrapper"},titleWrapper:{className:"pricing2-title-wrapper",children:[{name:"title",children:"Comparison of Different Sensors",className:"pricing2-title-h1"}]},Table:{name:"tabsTitle",size:"default",className:"pricing2-table",columns:{children:[{dataIndex:"name",key:"name",name:"empty",childWrapper:{children:[{name:"name",children:" "},{name:"content",children:" 
"}]}},{dataIndex:"free",key:"free",name:"free",childWrapper:{className:"pricing2-table-name-block",children:[{name:"name",className:"pricing2-table-name",children:(0,e.jsx)("span",{children:(0,e.jsxs)("p",{children:[(0,e.jsx)("span",{children:"Camera"}),(0,e.jsx)("br",{})]})})}]}},{dataIndex:"basic",key:"basic",name:"basic",childWrapper:{className:"pricing2-table-name-block",children:[{name:"name",className:"pricing2-table-name",children:(0,e.jsx)("span",{children:(0,e.jsx)("span",{children:(0,e.jsx)("p",{children:"Radar"})})})}]}},{dataIndex:"pro",key:"pro",name:"pro",childWrapper:{className:"pricing2-table-name-block",children:[{name:"name",className:"pricing2-table-name",children:(0,e.jsx)("span",{children:(0,e.jsx)("p",{children:"LiDAR"})})}]}}]},dataSource:{children:[{name:"list0",children:[{className:"pricing2-table-content-name",name:"name",children:"Color, Texture, Shape"},{name:"content1",children:"images/start-fill.svg",className:"pricing2-table-content"},{children:"Unlimited",name:"content1",className:"pricing2-table-content"},{children:"Unlimited",name:"content2",className:"pricing2-table-content"},{children:"Unlimited",name:"content3",className:"pricing2-table-content"}]},{name:"list1",children:[{className:"pricing2-table-content-name",name:"name",children:"Range Measurement"},{children:"Limited",name:"content0",className:"pricing2-table-content"},{children:"Unlimited",name:"content1",className:"pricing2-table-content"},{children:"Unlimited",name:"content2",className:"pricing2-table-content"},{children:"Unlimited",name:"content3",className:"pricing2-table-content"}]},{name:"list2",children:[{className:"pricing2-table-content-name",name:"name",children:"Velocity Measurement"},{name:"content0",children:"50GB",className:"pricing2-table-content"},{name:"content1",children:"250GB",className:"pricing2-table-content"},{name:"content2",children:"600GB",className:"pricing2-table-content"},{name:"content3",children:"Unlimited",className:"pricing2-table-content"}]},{name:"list3",children:[{className:"pricing2-table-content-name",name:"name",children:"Lighting Robustness"},{children:"-",name:"content0",className:"pricing2-table-content"},{name:"content1",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"},{name:"content2",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"},{name:"content3",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"}]},{name:"list4",children:[{className:"pricing2-table-content-name",name:"name",children:"Weather Robustness"},{name:"content0",children:"-",className:"pricing2-table-content"},{name:"content1",children:"-",className:"pricing2-table-content"},{name:"content2",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"},{name:"content3",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"}]},{name:"list5",children:[{className:"pricing2-table-content-name",name:"name",children:"Classification 
Ability"},{name:"content0",children:"-",className:"pricing2-table-content"},{name:"content1",children:"-",className:"pricing2-table-content"},{name:"content2",children:"-",className:"pricing2-table-content"},{name:"content3",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"}]},{name:"list5",children:[{className:"pricing2-table-content-name",name:"name",children:"3D Perception"},{name:"content0",children:"-",className:"pricing2-table-content"},{name:"content1",children:"-",className:"pricing2-table-content"},{name:"content2",children:"-",className:"pricing2-table-content"},{name:"content3",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"}]},{name:"list5",children:[{className:"pricing2-table-content-name",name:"name",children:"System Cost"},{name:"content0",children:"-",className:"pricing2-table-content"},{name:"content1",children:"-",className:"pricing2-table-content"},{name:"content2",children:"-",className:"pricing2-table-content"},{name:"content3",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"}]}]}}},H;(0,E.ac)(function(v){H=v});var Ke=typeof window!="undefined"?window:{},J=Ke.location,z=J===void 0?{}:J,Xe=function(v){S()(r,v);var m=C()(r);function r(b){var i;return y()(this,r),i=m.call(this,b),i.state={isMobile:H,show:!z.port},i}return R()(r,[{key:"componentDidMount",value:function(){var i=this;(0,E.ac)(function(u){i.setState({isMobile:!!u})}),z.port&&setTimeout(function(){i.setState({show:!0})},500)}},{key:"render",value:function(){var i=this,u=[(0,e.jsx)(re,{id:"Nav0_0",dataSource:Be,isMobile:this.state.isMobile},"Nav0_0"),(0,e.jsx)(de,{id:"Banner3_0",dataSource:Ue,isMobile:this.state.isMobile},"Banner3_0"),(0,e.jsx)(ye,{id:"Dataset0_0",dataSource:U,isMobile:this.state.isMobile},"Dataset0_0"),(0,e.jsx)(Ce,{id:"Dataset0_0",dataSource:U,isMobile:this.state.isMobile},"Dataset0_0"),(0,e.jsx)(xe,{id:"Fusion0_0",dataSource:N,isMobile:this.state.isMobile},"Fusion0_0"),(0,e.jsx)(Ae,{id:"Fusion0_0",dataSource:N,isMobile:this.state.isMobile},"Fusion0_0"),(0,e.jsx)(Ie,{id:"Fusion0_0",dataSource:N,isMobile:this.state.isMobile},"Fusion0_0"),(0,e.jsx)(Pe,{id:"Fusion0_0",dataSource:N,isMobile:this.state.isMobile},"Fusion0_0"),(0,e.jsx)(Oe,{id:"Fusion0_0",dataSource:N,isMobile:this.state.isMobile},"Fusion0_0"),(0,e.jsx)(Fe,{id:"Fusion0_0",dataSource:ze,isMobile:this.state.isMobile},"Fusion0_0"),(0,e.jsx)(We,{id:"Content13_0",dataSource:He,isMobile:this.state.isMobile},"Content13_0"),(0,e.jsx)(Le,{id:"Footer0_0",dataSource:Je,isMobile:this.state.isMobile},"Footer0_0")];return(0,e.jsx)("div",{className:"templates-wrapper",ref:function(g){i.dom=g},children:this.state.show&&u})}}]),r}(k.Component)}}]); +"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[842],{79512:function(Qe,F,p){p.r(F),p.d(F,{default:function(){return Xe}});var X=p(56690),y=p.n(X),Z=p(89728),R=p.n(Z),Y=p(61655),S=p.n(Y),Q=p(26389),C=p.n(Q),k=p(62435),E=p(80840),$=p(42122),o=p.n($),q=p(70215),M=p.n(q),ee=p(66115),w=p.n(ee),ae=p(38416),O=p.n(ae),T=p(84289),te=p(43033),e=p(86074),ne=["dataSource","isMobile"],re=function(v){S()(r,v);var m=C()(r);function r(b){var i;return y()(this,r),i=m.call(this,b),O()(w()(i),"phoneClick",function(){var u=!i.state.phoneOpen;i.setState({phoneOpen:u})}),i.state={phoneOpen:!1},i}return R()(r,[{key:"render",value:function(){var 
i=this,u=this.props,h=u.dataSource,g=u.isMobile,f=M()(u,ne),l=this.state.phoneOpen,n=h.LinkMenu,a=n.children,d=Object.keys(a).map(function(c,s){var _=a[c],I=te.rU,j={};return _.to&&_.to.match(/\//g)&&(j.href=_.to,j.target="_blank",I="a",delete _.to),k.createElement(I,o()(o()(o()({},_),j),{},{key:s.toString()}),a[c].children)}),t=l===void 0?300:null;return(0,e.jsx)(T.ZP,o()(o()(o()({component:"header",animation:{opacity:0,type:"from"}},h.wrapper),f),{},{children:(0,e.jsxs)("div",o()(o()({},h.page),{},{className:"".concat(h.page.className).concat(l?" open":""),children:[(0,e.jsx)(T.ZP,o()(o()({animation:{x:-30,type:"from",ease:"easeOutQuad"}},h.logo),{},{children:(0,e.jsx)("img",{width:"100%",src:h.logo.children,alt:"img"})})),g&&(0,e.jsxs)("div",o()(o()({},h.mobileMenu),{},{onClick:function(){i.phoneClick()},children:[(0,e.jsx)("em",{}),(0,e.jsx)("em",{}),(0,e.jsx)("em",{})]})),(0,e.jsx)(T.ZP,o()(o()({},n),{},{animation:g?{height:0,duration:300,onComplete:function(s){i.state.phoneOpen&&(s.target.style.height="auto")},ease:"easeInOutQuad"}:null,moment:t,reverse:!!l,children:d}))]}))}))}}]),r}(k.Component),oe=re,ie=p(13012),D=p.n(ie),V=p(72806),W=p(1289),se=p(72575),$e=p(83154),ce=["name","texty"],de=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=u.textWrapper.children.map(function(g){var f=g.name,l=g.texty,n=M()(g,ce);return f.match("button")?(0,e.jsx)(V.Z,o()(o()({type:"primary"},n),{},{children:g.children}),f):(0,e.jsx)("div",o()(o()({},n),{},{children:l?(0,e.jsx)(se.Z,{type:"mask-bottom",children:g.children}):g.children}),f)});return(0,e.jsx)("div",o()(o()(o()({},i),u.wrapper),{},{children:(0,e.jsx)(W.Z,o()(o()({type:["bottom","top"],delay:200},u.textWrapper),{},{children:h}),"QueueAnim")}))}}]),r}(k.PureComponent),le=de,ue=p(3600),L=p.n(ue),x=p(91587),pe=p(18698),me=p.n(pe),G=/^http(s)?:\/\/([\w-]+\.)+[\w-]+(\/[\w-./?%&=]*)?/,B=function(m,r){var b=m.name.indexOf("title")===0?"h1":"div";b=m.href?"a":b;var i=typeof m.children=="string"&&m.children.match(G)?k.createElement("img",{src:m.children,alt:"img"}):m.children;return m.name.indexOf("button")===0&&me()(m.children)==="object"&&(i=k.createElement(V.Z,o()({},m.children))),k.createElement(b,o()({key:r.toString()},m),i)},he=["childWrapper"],ge=["columns","dataSource"],fe=["dataSource","isMobile"],_e=["columns","dataSource"],qe=function(v){S()(r,v);var m=C()(r);function r(){var b;y()(this,r);for(var i=arguments.length,u=new Array(i),h=0;h=0}),d=f.children.filter(function(t){return t.key.indexOf("name")===-1});return d.map(function(t,c){var s=[].concat(a[0],t).filter(function(j){return j});s.length>1&&(s[0].colSpan=0,s[1].colSpan=2);var _=l.children.map(function(j){var Ze=j.children.filter(function(P){return P.name.indexOf("name")===-1}),Ye=j.children.filter(function(P){return P.name.indexOf("name")>=0});return o()(o()({},j),{},{children:[].concat(Ye[0],Ze[c]).filter(function(P){return P})})}),I=o()(o()({},n),{},{columns:b.getColumns(s),dataSource:b.getDataSource(_,s)});return(0,e.jsx)(x.Z,o()(o()({},I),{},{pagination:!1,bordered:!0}),c.toString())})}),b}return R()(r,[{key:"render",value:function(){var 
i=this.props,u=i.dataSource,h=i.isMobile,g=M()(i,fe),f=u.Table,l=u.wrapper,n=u.page,a=u.titleWrapper,d=f.columns,t=f.dataSource,c=M()(f,_e),s=o()(o()({},c),{},{columns:this.getColumns(d.children),dataSource:this.getDataSource(t.children,d.children)}),_=h?this.getMobileChild(f):(0,e.jsx)(x.Z,o()(o()({},s),{},{pagination:!1,bordered:!0}),"table");return(0,e.jsx)("div",o()(o()(o()({},g),l),{},{children:(0,e.jsxs)("div",o()(o()({},n),{},{children:[(0,e.jsx)("div",o()(o()({},a),{},{children:a.children.map(B)})),_]}))}))}}]),r}(k.PureComponent),ea=null,aa=p(61254),ve=p.p+"static/cover.df8b2232.png",be=p(46889),ke=p(45098),ye=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=[{name:"Color, Texture, Shape",star:1},{name:"Range Measurement",star:5},{name:"Velocity Measurement",star:5},{name:"Lighting Robustness",star:5},{name:"Weather Robustness",star:5},{name:"Classification Ability",star:2},{name:"3D Perception",star:1},{name:"Cost Advantage",star:4}],g={data:h.map(function(t){return o()(o()({},t),{},{star:t.star})}),xField:"name",yField:"star",appendPadding:[0,20,0,20],color:"#B2934A",legend:!0,meta:{star:{alias:"Radar Ability",min:0,nice:!0,formatter:function(c){return c}}},xAxis:{tickLine:null},yAxis:{label:!1,grid:{alternateColor:"rgba(0, 0, 0, 0.04)"}},point:{size:2},area:{}},f=[{name:"Color, Texture, Shape",star:5},{name:"Range Measurement",star:2},{name:"Velocity Measurement",star:2},{name:"Lighting Robustness",star:3},{name:"Weather Robustness",star:3},{name:"Classification Ability",star:5},{name:"3D Perception",star:3},{name:"Cost Advantage",star:5}],l={data:f.map(function(t){return o()(o()({},t),{},{star:t.star})}),xField:"name",yField:"star",appendPadding:[0,20,0,20],color:"#B66A6A",meta:{star:{alias:"Camera Ability",min:0,nice:!0,formatter:function(c){return c}}},xAxis:{tickLine:null},yAxis:{label:!1,grid:{alternateColor:"rgba(0, 0, 0, 0.04)"}},point:{size:2},area:{}},n=[{name:"Color, Texture, Shape",star:5},{name:"Range Measurement",star:5},{name:"Velocity Measurement",star:5},{name:"Lighting Robustness",star:5},{name:"Weather Robustness",star:5},{name:"Classification Ability",star:5},{name:"3D Perception",star:3},{name:"Cost Advantage",star:4}],a={data:n.map(function(t){return o()(o()({},t),{},{star:t.star})}),xField:"name",yField:"star",color:"#589D9D",meta:{star:{alias:"Fusion Ability",min:0,nice:!0,formatter:function(c){return c}}},xAxis:{tickLine:null},yAxis:{label:!1,grid:{alternateColor:"rgba(0, 0, 0, 0.04)"}},point:{size:2},area:{}},d=function(c,s,_,I){console.log("params",c,s,_,I)};return(0,e.jsx)("div",{className:"home-page-wrapper content6-wrapper",children:(0,e.jsxs)("div",{className:"ant-row home-page content6",id:"applications",children:[(0,e.jsx)("div",{className:"ant-col content6-text ant-col-xs-2 ant-col-md-2"}),(0,e.jsx)("div",{className:"ant-col content6-text ant-col-xs-20 ant-col-md-20",children:(0,e.jsx)("div",{className:"title-wrapper",children:(0,e.jsx)("div",{className:"chart",children:(0,e.jsx)(be.Z,{src:ve})})})})]})})}}]),r}(k.PureComponent),Re=ye,A=p(20550),U=p(27049),Se=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var 
h=[],g=[40,.5,13,12,3,11,44,100,400,393,49,10,4,8,8,7,40,35,26,54],f=[60,8,30,28,10,26,65,70,90,88,60,24,12,21,21,18,60,55,40,66];(0,ke.S6)(g,function(d,t){h.push({type:"text",position:[t,f[t]],content:"".concat(d,"k"),style:{textAlign:"center",fontSize:14,fill:"rgba(0,0,0,0.85)"},offsetY:-10})});var l=[{title:"Id",dataIndex:"key",width:"10px"},{title:"Name",dataIndex:"name",width:"10%",render:function(t,c){return(0,e.jsxs)("div",{children:[(0,e.jsx)("a",{target:"_blank",href:t[1],children:t[0]})," [",(0,e.jsx)("a",{href:"#references",children:t[2]}),"]"]})}},{title:"Year",dataIndex:"year",sorter:function(t,c){return t.year-c.year}},{title:"Radar Data Representation",dataIndex:"radar_data_representation",filters:[{text:"Point Cloud",value:"Point Cloud"},{text:"Radar Tensor",value:"Radar Tensor"}],onFilter:function(t,c){return c.radar_data_representation.includes(t)},filterSearch:!0,render:function(t,c){return(0,e.jsx)("span",{children:t.map(function(s){var _="";switch(s){case"Point Cloud":_="#108ee9";break;case"ADC Signal":_="#f50";break;case"Radar Tensor":_="#2db7f5";break;case"Grid Map":_="#87d068";break;case"Micro-Doppler Signature":_="#2db7f5";break;default:_="#108ee9"}return(0,e.jsx)(A.Z,{color:_,children:s},s)})})}},{title:"Task",dataIndex:"task",filters:[{text:"Object Detection",value:"Object Detection"},{text:"Semantic Segmentation",value:"Semantic Segmentation"}],onFilter:function(t,c){return c.task.includes(t)},filterSearch:!0,render:function(t,c){return(0,e.jsx)("span",{children:t.map(function(s){var _="";switch(s){case"Object Detection":_="#1890ff";break;case"Semantic Segmentation":_="#fa541c";break;case"Object Tracking":_="#fa8c16";break;case"Localization":_="#13c2c2";break;case"Planning":_="#52c41a";break;case"Prediction":_="#f5222d";break;default:_="blue-inverse"}return(0,e.jsx)(A.Z,{color:_,children:s},s)})})}},{title:"Sensors",dataIndex:"sensors"},{title:"Category Number",dataIndex:"category_number"},{title:"Categories",dataIndex:"categories"},x.Z.EXPAND_COLUMN,{title:"Record Area",dataIndex:"record_area"},{title:"Record Time",dataIndex:"record_time"},{title:"Affiliation",dataIndex:"affiliation"}],n=[{key:"1",name:["nuScenes","https://www.nuscenes.org/nuscenes","1"],year:2019,task:["Object Detection","Object Tracking"],sensors:"Radar (Continental ARS408), Camera, LiDAR",radar_data_representation:["Point Cloud","Grid Map"],category_number:23,categories:"different vehicles, types of pedestrians, mobility devices and other objects",scenarios:"Roads (intersection, crosswalk, roundabout, pedestrian crossing)",record_area:"Boston, Singapore",record_time:"September 2018",affiliation:"nuTonomy",paper_link:"https://openaccess.thecvf.com/content_CVPR_2020/papers/Caesar_nuScenes_A_Multimodal_Dataset_for_Autonomous_Driving_CVPR_2020_paper.pdf"},{key:"2",name:["Astyx","http://www.astyx.net","2"],year:2019,task:["Object Detection"],sensors:"Radar (Astyx 6455 HiRes), Camera, LiDAR",radar_data_representation:["Point Cloud"],category_number:7,categories:"Bus, Car, Cyclist, Motorcyclist, Person, Trailer, Truck",scenarios:"Roads (highway, urban, rural, parking, roundabout)",record_area:"South of Germany",record_time:"-",affiliation:"Technical University of Munich",paper_link:"https://ieeexplore.ieee.org/abstract/document/8904734"},{key:"3",name:["SeeingThroughFog","https://www.uni-ulm.de/en/in/driveu/projects/dense-datasets/","3"],year:2020,task:["Object Detection"],sensors:"Radar (77GHz), Stereo/Gated/FIR Camera, LiDAR, 
Environmental Sensors",radar_data_representation:["Point Cloud"],category_number:4,categories:"Passenger Car, Large Vehicle, Pedestrian, Ridable Vehicle",scenarios:"Adverse road conditions (clear, rainy, snowy, foggy, nighttime, urban, highway, rural, traffic)",record_area:"Germany, Sweden, Denmark, and Finland",record_time:"February and December 2019",affiliation:"Mercedes-Benz AG",paper_link:"https://openaccess.thecvf.com/content_CVPR_2020/html/Bijelic_Seeing_Through_Fog_Without_Seeing_Fog_Deep_Multimodal_Sensor_Fusion_CVPR_2020_paper.html"},{key:"4",name:["CARRADA","https://arthurouaknine.github.io/codeanddata/carrada","4"],year:2020,task:["Object Detection","Semantic Segmentation","Object Tracking"],sensors:"Radar (TI AWR1843), RGB-D Camera, LiDAR",radar_data_representation:["Radar Tensor"],category_number:4,categories:"car, pedestrian, cyclist, and motorbike",scenarios:"Roads (urban, highway, intersection scenarios)",record_area:"Canada",record_time:"-",affiliation:"-",paper_link:"https://ieeexplore.ieee.org/document/9413181"},{key:"5",name:["Zendar","http://zendar.io/dataset","5"],year:2020,task:["Object Detection","Mapping","Localization"],sensors:"Radar (synthetic aperture), Camera, LiDAR",radar_data_representation:["Radar Tensor","Point Cloud"],category_number:3,categories:"vehicles, pedestrians, and cyclists",scenarios:"Roads (diverse urban driving environments)",record_area:"-",record_time:"-",affiliation:"Technical University of Munich",paper_link:"https://openaccess.thecvf.com/content_CVPRW_2020/papers/w6/Mostajabi_High-Resolution_Radar_Dataset_for_Semi-Supervised_Learning_of_Dynamic_Objects_CVPRW_2020_paper.pdf"},{key:"6",name:["RADIATE","http://pro.hw.ac.uk/radiate/","6"],year:2020,task:["Object Detection"],sensors:"Radar (Navtech CTS350-X), Camera",radar_data_representation:["Radar Tensor"],category_number:8,categories:"car, van, bus, truck, motorbike, bicycle, pedestrian and a group of pedestrians",scenarios:"Roads (wet, snowy, foggy, rainy, nighttime, urban, highway)",record_area:"Edinburgh",record_time:"Between February 2019 and February 2020",affiliation:"Heriot-Watt University",paper_link:"https://arxiv.org/pdf/2010.09076.pdf"},{key:"7",name:["AIODrive","http://www.aiodrive.org/","7"],year:2020,task:["Object Detection","Object Tracking","Semantic Segmentation","Depth Estimation"],sensors:"RGB, Stereo, Depth, LiDAR, SPAD-LiDAR, Radar, IMU, GPS",radar_data_representation:["Point Cloud"],category_number:11,categories:"Vehicle, Pedestrian, Vegetation, Building, Road, Sidewalk, Wall, Traffic Sign, Pole and Fence",scenarios:"Roads (highway, residential street, parking)",record_area:"one of eight cities from Carla assets",record_time:"-",affiliation:"Carnegie Mellon University",paper_link:"https://www.xinshuoweng.com/papers/AIODrive/arXiv.pdf"},{key:"8",name:["CRUW","https://www.cruwdataset.org/","8"],year:2021,task:["Object Detection"],sensors:"Radar (TI AWR1843, DCA1000), Cameras",radar_data_representation:["Radar Tensor"],category_number:3,categories:"Pedestrian, Cyclist, Car",scenarios:"Roads (parking, campus, city, highway)",record_area:"-",record_time:"-",affiliation:"University of Washington",paper_link:"https://arxiv.org/pdf/2107.14599.pdf"},{key:"9",name:["RaDICaL","https://publish.illinois.edu/radicaldata/","9"],year:2021,task:["Object Detection"],sensors:"Radar (TI IWR1443), RGB-D Camera",radar_data_representation:["ADC Signal"],category_number:2,categories:"Pedestrian, Car",scenarios:"Indoor (people, 
static clutter), Roads (urban, rural, highway, various traffic scenarios)",record_area:"-",record_time:"-",affiliation:"University of Illinois at Urbana-Champaign",paper_link:"https://ieeexplore.ieee.org/document/9361086"},{key:"10",name:["RadarScenes","https://radar-scenes.com/","10"],year:2021,task:["Object Detection","Semantic Segmentation"],sensors:"Radar (77GHz), Documentary Camera",radar_data_representation:["Point Cloud"],category_number:11,categories:"Car, Large Vehicle, Truck, Bus, Train, Bicycle, Motorized Two-wheeler, Pedestrian, Pedestrian Group, Animal, and Other",scenarios:"Roads (urban, suburban, rural, highway, tunnel, intersection, roundabout, parking)",record_area:"Ulm, Germany",record_time:"Between 2016 and 2018",affiliation:"Mercedes-Benz AG, Stuttgart, Germany",paper_link:"https://arxiv.org/pdf/2104.02493v1.pdf"},{key:"11",name:["RADDet","https://github.com/ZhangAoCanada/RADDet","11"],year:2021,task:["Object Detection"],sensors:"Radar (TI AWR1843), Stereo Cameras",radar_data_representation:["Radar Tensor"],category_number:6,categories:"Person, Bicycle, Car, Motorcycle, Bus, Truck",scenarios:"Roads (urban, rural, highway, intersections, weather conditions)",record_area:"-",record_time:"September to October 2020",affiliation:"University of Ottawa",paper_link:"https://ieeexplore.ieee.org/abstract/document/9469418"},{key:"12",name:["RADIal","https://github.com/valeoai/RADIal","12"],year:2021,task:["Object Detection","Semantic Segmentation"],sensors:"Radar (high-definition), Cameras, LiDAR",radar_data_representation:["ADC Signal","Radar Tensor","Point Cloud"],category_number:1,categories:"Vehicle",scenarios:"Roads (urban, highway, rural)",record_area:"-",record_time:"-",affiliation:"Valeo.ai, Paris, France",paper_link:"https://arxiv.org/abs/2112.10646"},{key:"13",name:["VoD","https://tudelft-iv.github.io/view-of-delft-dataset/","13"],year:2022,task:["Object Detection","Object Tracking"],sensors:"Radar (ZF FRGen 21), Stereo Camera, LiDAR",radar_data_representation:["Point Cloud"],category_number:13,categories:"Car, Pedestrian, Cyclist, Rider, Unused Bicycle, Bicycle Rack, Human Depiction, Moped or Scooter, Motor, Ride Other, Vehicle Other, Truck, Ride Uncertain",scenarios:"Roads (highway, rural, urban)",record_area:"City of Delft (The Netherlands)",record_time:"-",affiliation:"TU Delft, The Netherlands",paper_link:"https://pure.tudelft.nl/ws/portalfiles/portal/115464174/Multi_Class_Road_User_Detection_With_31D_Radar_in_the_View_of_Delft_Dataset.pdf"},{key:"14",name:["Boreas","https://www.boreas.utias.utoronto.ca/","14"],year:2022,task:["Object Detection","Localization","Odometry"],sensors:"Radar (Navtech CIR304-H), Camera, LiDAR",radar_data_representation:["Radar Tensor"],category_number:4,categories:"Car, Pedestrian, Cyclist, Misc",scenarios:"Roads (highway, rural, urban)",record_area:"University of Toronto Institute for Aerospace Studies (UTIAS)",record_time:"November 2020 to November 2021",affiliation:"University of Toronto",paper_link:"https://arxiv.org/pdf/2203.10168.pdf"},{key:"15",name:["TJ4DRadSet","https://github.com/TJRadarLab/TJ4DRadSet","15"],year:2022,task:["Object Detection","Object Tracking"],sensors:"Radar (Oculii Eagle), Camera, LiDAR",radar_data_representation:["Point Cloud"],category_number:8,categories:"Car, Pedestrian, Cyclist, Bus, Motorcyclist, Truck, Engineering Vehicle, Tricyclist",scenarios:"Roads (intersections, one-way streets)",record_area:"Suzhou, China",record_time:"Fourth quarter of 2021",affiliation:"Tongji 
University",paper_link:"https://arxiv.org/vc/arxiv/papers/2204/2204.13483v2.pdf"},{key:"16",name:["K-Radar","https://github.com/kaist-avelab/k-radar","16"],year:2022,task:["Object Detection","Object Tracking","SLAM"],sensors:"Radar (RETINA-4ST), Stereo Cameras, LiDAR",radar_data_representation:["Radar Tensor"],category_number:5,categories:"Pedestrian, Motobike, Bicycle, Sedan, Bus or Truck",scenarios:"Roads (highway, intersection, urban)",record_area:"Daejeon of the Republic of Korea",record_time:"-",affiliation:"KAIST",paper_link:"https://www.researchgate.net/publication/361359662_K-Radar_4D_Radar_Object_Detection_Dataset_and_Benchmark_for_Autonomous_Driving_in_Various_Weather_Conditions"},{key:"17",name:["aiMotive","https://github.com/aimotive/aimotive_dataset","17"],year:2022,task:["Object Detection"],sensors:"Radar (77GHz), Camera, LiDAR",radar_data_representation:["Point cloud"],category_number:14,categories:"Pedestrian, Car, Bus, Truck, Van, Motorcycle, Pickup, Rider, Bicycle, Trailer, Train, Shopping Cart, Other Object",scenarios:" Roads (highway, urban, rural)",record_area:"California, US; Austria; and Hungary",record_time:"-",affiliation:"aimotive",paper_link:"https://openreview.net/pdf?id=yl9aThYT9W"},{key:"18",name:["WaterScenes","https://waterscenes.github.io","18"],year:2023,task:["Object Detection","Segmentation"],sensors:"Radar (Oculii Eagle), Camera, GPS, IMU",radar_data_representation:["point cloud"],category_number:7,categories:"Pier, Buoy, Sailor, Ship, Boat, Vessel, Kayak",scenarios:"Waterways (river, lake, canal, moat)",record_area:"Suzhou, China",record_time:"2022/08-2022/12",affiliation:"XJTLU",paper_link:"https://arxiv.org/pdf/2307.06505v2.pdf"},{key:"19",name:["MulRan","https://sites.google.com/view/mulran-pr","19"],year:2020,task:["Place Recognition"],sensors:"Radar (Navtech CIR204-H), Cameras, LiDAR",radar_data_representation:["Radar Tensor"],category_number:7,categories:"buildings, road, tree, sign, car, pedestrain, bike",scenarios:"Roads (city, highway, intersection, crosswalks, parks, recreational areas, tunnels, bridges)",record_area:"-",record_time:"2018-2019",affiliation:"Politecnico di Milano",paper_link:"https://gisbi-kim.github.io/publications/gkim-2020-icra.pdf"},{key:"20",name:["Oxford Radar RobotCar","http://ori.ox.ac.uk/datasets/radar-robotcar-dataset","20"],year:2020,task:["Object Detection","Odometer"],sensors:"Radar (Navtech CTS350-X), camera, LiDAR, GPS, INS",radar_data_representation:["Radar Tensor","Grid Map"],category_number:7,categories:"Vehicle,Pedestrian,Bicycle,Sign,Road,Lane,Road Marking",scenarios:"Roads (urban, highway, rural, industrial area, residential area, roundabout, intersection)",record_area:"-",record_time:"2019-2020",affiliation:"Department of Engineering Science, University of Oxford, UK",paper_link:"https://arxiv.org/pdf/1909.01300.pdf"},{key:"21",name:["SCORP","www.sensorcortek.ai/publications/","21"],year:2020,task:["Semantic Segmentation"],sensors:"Radar (76 GHz), Camera",radar_data_representation:["Radar Tensor"],category_number:0,categories:"-",scenarios:"Roads (parking lot)",record_area:"-",record_time:"-",affiliation:"University of Ottawa, Ottawa, Canada",paper_link:"https://ieeexplore.ieee.org/abstract/document/9299052"},{key:"22",name:["ColoRadar","https://arpg.github.io/coloradar/","22"],year:2022,task:["Localization"],sensors:"Radar (AWR1843), LiDAR, IMU",radar_data_representation:["Radar Tensor","Point Cloud"],category_number:0,categories:"-",scenarios:"Indoor, outdoor 
environments",record_area:"-",record_time:"-",affiliation:"Department of Computer Science, University of Colorado Boulder, USA",paper_link:"https://journals.sagepub.com/doi/10.1177/02783649211068535"},{key:"23",name:["Pixset","dataset.leddartech.com","23"],year:2021,task:["Object Detection","Object Tracking"],sensors:"Radar (TI AWR1843), Cameras, LiDARs",radar_data_representation:["Point Cloud"],category_number:0,categories:"-",scenarios:"Roads (Car, Pedestrian, Cyclist)",record_area:"-",record_time:"2019",affiliation:"-",paper_link:"https://arxiv.org/pdf/2102.12010v1.pdf"},{key:"24",name:["NTU4DRadLM","https://github.com/ junzhang2016/NTU4DRadLM","24"],year:2023,task:["SLAM"],sensors:"a 3D LiDAR, a visual camera, a 4D Radar, a thermal camera, an IMU and a RTK GPS",radar_data_representation:["Point Cloud"],category_number:0,categories:"-",scenarios:"Roads (carpark, garden, campus)",record_area:"-",record_time:"-",affiliation:"Nanyang Technological University",paper_link:"https://arxiv.org/pdf/2309.00962.pdf"},{key:"25",name:["Dual-Radar","ttps://github.com/adept- thu/Dual-Radar","25"],year:2023,task:["Object Detection","Object Tracking"],sensors:"Radar (ARS548 RDI, Arbe Phoenix), Camera, LiDAR",radar_data_representation:["Point Cloud"],category_number:6,categories:"Car, Pedestrian, Cyclist, Bus, Truck, other",scenarios:"Roads (carpark, garden, campus)",record_area:"-",record_time:"-",affiliation:"Tsinghua University, Beijing",paper_link:"http://export.arxiv.org/pdf/2310.07602"},{key:"26",name:["Dop-NET","https://github.com/UCLRadarGroup/DopNet","26"],year:2020,task:["Classification"],sensors:"Radar (Ancortek 24GHz)",radar_data_representation:["Micro-Doppler Signature"],category_number:0,categories:"-",scenarios:"Gestures (wave, pinch, click, swipe)",record_area:"-",record_time:"-",affiliation:"University College London",paper_link:"https://ietresearch.onlinelibrary.wiley.com/doi/epdf/10.1049/el.2019.4153"},{key:"27",name:["CI4R","-","27"],year:2020,task:["Classification"],sensors:"Radar (77GHz, 24GHz, Xethru)",radar_data_representation:["Micro-Doppler Signature"],category_number:0,categories:"-",scenarios:"Activities (walking, picking, sitting, crawling, kneeling, limping)",record_area:"-",record_time:"-",affiliation:"-",paper_link:"https://doi.org/10.1117/12.2559155"},{key:"28",name:["Open Radar Datasets","-","28"],year:2021,task:["Classification"],sensors:"Radar (TI AWR2243), Camera, GPS, IMU",radar_data_representation:["Micro-Doppler Signature"],category_number:0,categories:"-",scenarios:"Roads (urban, highway, rural)",record_area:"-",record_time:"-",affiliation:"Norwegian Defence Research Establishment, Kjeller, Norway",paper_link:"https://ieeexplore.ieee.org/abstract/document/9455239"},{key:"29",name:["MCD-Gesture","https://github.com/yadongcs/cross_domain_gesture_dataset","29"],year:2022,task:["Classification"],sensors:"Radar (TI AWR1843)",radar_data_representation:["Micro-Doppler Signature"],category_number:0,categories:"-",scenarios:" Gestures (push, pull, slide left, slide right, clockwise turning, counterclockwise turning)",record_area:"-",record_time:"-",affiliation:"School of Cyber Science and Technology University of Science and Technology of 
China",paper_link:"https://ieeexplore.ieee.org/abstract/document/9894724"}],a=function(t,c,s,_){console.log("params",t,c,s,_)};return(0,e.jsx)("div",o()(o()(o()({},i),u.wrapper),{},{id:"datasets",children:(0,e.jsxs)("div",{className:"title-wrapper",children:[(0,e.jsx)(U.Z,{orientation:"center",children:(0,e.jsx)("h1",{name:"title",className:"title-h1",children:"Radar Perception Datasets"})}),(0,e.jsx)(x.Z,{bordered:!0,scroll:{x:"200px"},pagination:{pageSize:10,hideOnSinglePage:!0},columns:l,dataSource:n,onChange:a,expandable:{columnTitle:"Size / Scenarios",expandedRowRender:function(t){return(0,e.jsxs)("p",{style:{margin:0},children:["Size: ",t.size,(0,e.jsx)("br",{}),"Scenarios: ",t.scenarios]})},rowExpandable:function(t){return t.name!=="Not Expandable"}}})]})}))}}]),r}(k.PureComponent),Ce=Se,De=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=[{title:"Id",dataIndex:"key",width:"10px"},{title:"Name",dataIndex:"name"},{title:"Short Name",dataIndex:"short_name"},{title:"Year",dataIndex:"year",sorter:function(n,a){return n.year-a.year}},{title:"Task",dataIndex:"task",filters:[{text:"Object Detection",value:"Object Detection"},{text:"Semantic Segmentation",value:"Semantic Segmentation"}],onFilter:function(n,a){return a.task.includes(n)},filterSearch:!0,width:"10%",render:function(n,a){var d=n.toString().split("|");console.log(d);var t=[];return d.map(function(c){c=c.trim();var s="";switch(c){case"Object Detection":s="#1890ff";break;case"Semantic Segmentation":s="#fa541c";break;case"Object Tracking":s="#fa8c16";break;case"Localization":s="#13c2c2";break;case"Classification/Motion Recognition":s="#52c41a";break;case"Object Classification":s="#eb2f96";break;default:s="blue-inverse"}t.push((0,e.jsx)(A.Z,{color:s,children:c},c))}),t}},{title:"Dataset",dataIndex:"dataset",filters:[{text:"nuScenes",value:"nuScenes"}],onFilter:function(n,a){return a.dataset.includes(n)},filterSearch:!0,render:function(n,a){var d=n.toString().split("|"),t=[];return d.map(function(c){t.push((0,e.jsxs)("div",{children:[(0,e.jsx)("span",{children:c}),(0,e.jsx)("br",{})]}))}),t}},{title:"Conference/Journal",dataIndex:"conference_journal"},{title:"Link",dataIndex:"source_code",render:function(n,a){return console.log(a),a.source_code!=""&&a.source_code!="-"?(0,e.jsxs)("div",{children:[(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"}),"\xA0\xA0",(0,e.jsx)("a",{target:"_blank",href:a.source_code,children:"Code"})]}):(0,e.jsx)("div",{children:(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"})})}}],g=[{key:"1",name:"Radar Image Reconstruction from Raw ADC Data using Parametric Variational Autoencoder with Domain Adaptation",short_name:"-",year:2020,task:["Classification/Motion Recognition"],dataset:[],conference_journal:"ICPR",paper_link:"https://ieeexplore.ieee.org/abstract/document/9412858",source_code:""},{key:"2",name:"Improved Target Detection and Feature Extraction using a Complex-Valued Adaptive Sine Filter on Radar Time Domain Data",short_name:"-",year:2021,task:["Classification/Motion Recognition"],dataset:[],conference_journal:"EUSIPCO",paper_link:"https://ieeexplore.ieee.org/abstract/document/9616250",source_code:""},{key:"3",name:"Data-Driven Radar Processing Using a Parametric Convolutional Neural Network for Human Activity 
Classification",short_name:"-",year:2021,task:["Classification/Motion Recognition"],dataset:[],conference_journal:"IEEE Sensors",paper_link:"https://ieeexplore.ieee.org/abstract/document/9464267",source_code:""},{key:"4",name:"Spiking Neural Network-Based Radar Gesture Recognition System Using Raw ADC Data",short_name:"-",year:2021,task:["Classification/Motion Recognition"],dataset:[],conference_journal:"IEEE Sensors Letters",paper_link:"https://ieeexplore.ieee.org/abstract/document/9772332",source_code:""},{key:"5",name:"Detection of Human Breathing in Non-Line-of-Sight Region by Using mmWave FMCW Radar",short_name:"-",year:2022,task:["Classification/Motion Recognition"],dataset:[],conference_journal:"TIM",paper_link:"https://ieeexplore.ieee.org/abstract/document/9897091",source_code:""},{key:"6",name:"CubeLearn: End-to-End Learning for Human Motion Recognition From Raw mmWave Radar Signals",short_name:"CubeLearn",year:2023,task:["Classification/Motion Recognition"],dataset:[],conference_journal:"IEEE IOT",paper_link:"https://ieeexplore.ieee.org/abstract/document/10018429",source_code:""},{key:"7",name:"ADCNet: End-to-end perception with raw radar ADC data",short_name:"ADCNet",year:2023,task:["Object Dection"],dataset:["RADIal"],conference_journal:"arXiv",paper_link:"https://arxiv.org/abs/2303.11420",source_code:""},{key:"8",name:"T-FFTRadNet: Object Detection with Swin Vision Transformers from Raw ADC Radar Signals",short_name:"T-FFTRadNet",year:2023,task:["Object Dection"],dataset:["RADIal"],conference_journal:"arXiv",paper_link:"https://arxiv.org/abs/2303.16940",source_code:""},{key:"9",name:"Echoes Beyond Points: Unleashing the Power of Raw Radar Data in Multi-modality Fusion",short_name:"Echoes Beyond Points",year:2023,task:["Object Dection"],dataset:["RADIal"],conference_journal:"NeurIPS",paper_link:"https://arxiv.org/abs/2307.16532",source_code:""},{key:"10",name:"Azimuth Super-Resolution for FMCW Radar in Autonomous Driving",short_name:"-",year:2023,task:["Object Dection"],dataset:["RADIal"],conference_journal:"CVPR",paper_link:"https://openaccess.thecvf.com/content/CVPR2023/html/Li_Azimuth_Super-Resolution_for_FMCW_Radar_in_Autonomous_Driving_CVPR_2023_paper.html",source_code:""},{key:"11",name:"RF-based child occupation detection in the vehicle interior",short_name:"-",year:2016,task:["Vital Sign"],dataset:[],conference_journal:"IRS",paper_link:"https://ieeexplore.ieee.org/document/7497352",source_code:""},{key:"12",name:"A Theoretical Investigation of the Detection of Vital Signs in Presence of Car Vibrations and RADAR-Based Passenger Classification",short_name:"-",year:2019,task:["Vital Sign"],dataset:[],conference_journal:"TVT",paper_link:"https://ieeexplore.ieee.org/abstract/document/8638548",source_code:""},{key:"13",name:"Non-Contact Vital Signs Monitoring for Multiple Subjects Using a Millimeter Wave FMCW Automotive Radar",short_name:"-",year:2020,task:["Vital Sign"],dataset:[],conference_journal:"IMS",paper_link:"https://ieeexplore.ieee.org/abstract/document/9223838",source_code:""},{key:"14",name:"Sparsity-Based Multi-Person Non-Contact Vital Signs Monitoring via FMCW Radar",short_name:"-",year:2023,task:["Vital Sign"],dataset:[],conference_journal:"JBHI",paper_link:"https://ieeexplore.ieee.org/abstract/document/10065434",source_code:""},{key:"15",name:"Radar-Based Monitoring of Vital Signs: A Tutorial Overview",short_name:"-",year:2023,task:["Vital 
Sign"],dataset:[],conference_journal:"JPROC",paper_link:"https://ieeexplore.ieee.org/abstract/document/10049295",source_code:""}],f=function(n,a,d,t){console.log("params",n,a,d,t)};return(0,e.jsxs)("div",o()(o()(o()({},i),u.wrapper),{},{id:"methods",children:[(0,e.jsx)(U.Z,{orientation:"center",children:(0,e.jsx)("h1",{name:"title",className:"title-h1",children:"Radar Perception Methods"})}),(0,e.jsx)("div",{className:"title-wrapper",children:(0,e.jsx)("h2",{name:"title",className:"title-h1",children:"ADC Signal Methods"})}),(0,e.jsx)(x.Z,{bordered:!0,scroll:{x:"200px"},columns:h,dataSource:g,onChange:f})]}))}}]),r}(k.PureComponent),xe=De,je=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=[{title:"Id",dataIndex:"key",width:"10px"},{title:"Name",dataIndex:"name"},{title:"Short Name",dataIndex:"short_name"},{title:"Year",dataIndex:"year",sorter:function(n,a){return n.year-a.year}},{title:"Task",dataIndex:"task",filters:[{text:"Object Detection",value:"Object Detection"},{text:"Semantic Segmentation",value:"Semantic Segmentation"}],onFilter:function(n,a){return a.task.includes(n)},filterSearch:!0,width:"10%",render:function(n,a){var d=n.toString().split("|");console.log(d);var t=[];return d.map(function(c){c=c.trim();var s="";switch(c){case"Detection":s="#1890ff";break;case"Segmentation":s="#fa541c";break;case"Tracking":s="#fa8c16";break;case"Multi-Task":s="#13c2c2";break;case"Planning":s="#52c41a";break;case"Prediction":s="#f5222d";break;case"Object Classification":s="#eb2f96";break;default:s="blue-inverse"}t.push((0,e.jsx)(A.Z,{color:s,children:c},c))}),t}},{title:"Dataset",dataIndex:"dataset",filters:[{text:"nuScenes",value:"nuScenes"}],onFilter:function(n,a){return a.dataset.includes(n)},filterSearch:!0,render:function(n,a){var d=n.toString().split("|"),t=[];return d.map(function(c){t.push((0,e.jsxs)("div",{children:[(0,e.jsx)("span",{children:c}),(0,e.jsx)("br",{})]}))}),t}},{title:"Conference/Journal",dataIndex:"conference_journal"},{title:"Link",dataIndex:"source_code",render:function(n,a){return console.log(a),a.source_code!=""&&a.source_code!="-"?(0,e.jsxs)("div",{children:[(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"}),"\xA0\xA0",(0,e.jsx)("a",{target:"_blank",href:a.source_code,children:"Code"})]}):(0,e.jsx)("div",{children:(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"})})}}],g=[{key:"1",name:"Experiments with mmWave Automotive Radar Test-bed",short_name:"-",year:2019,task:["Detection"],dataset:[],conference_journal:"RA ACSSC",paper_link:"https://ieeexplore.ieee.org/abstract/document/9048939",source_code:"-"},{key:"2",name:"Vehicle Detection With Automotive Radar Using Deep Learning on Range-Azimuth-Doppler Tensors",short_name:"-",year:2019,task:["Detection"],dataset:[],conference_journal:"RAD ICCVW",paper_link:"https://ieeexplore.ieee.org/document/9022248",source_code:"-"},{key:"3",name:"Probabilistic oriented object detection in automotive radar",short_name:"-",year:2020,task:["Detection"],dataset:[],conference_journal:"RA CVPRW",paper_link:"https://ieeexplore.ieee.org/document/9150751",source_code:"-"},{key:"4",name:"RODNet: Radar Object Detection Using Cross-Modal Supervision",short_name:"RODNet",year:2020,task:["Detection"],dataset:["CRUW"],conference_journal:"RA 
WACV",paper_link:"https://openaccess.thecvf.com/content/WACV2021/papers/Wang_RODNet_Radar_Object_Detection_Using_Cross-Modal_Supervision_WACV_2021_paper.pdf",source_code:"-"},{key:"5",name:"RODNet: A Real-Time Radar Object Detection Network Cross-Supervised by Camera-Radar Fused Object 3D Localization",short_name:"RODNet",year:2020,task:["Detection"],dataset:["CRUW"],conference_journal:"RA JSTSP",paper_link:"https://ieeexplore.ieee.org/document/9353210",source_code:"-"},{key:"6",name:"Range-Doppler Detection in Automotive Radar with Deep Learning",short_name:"-",year:2020,task:["Detection"],dataset:[],conference_journal:"RD IJCNN",paper_link:"https://ieeexplore.ieee.org/document/9207080",source_code:"-"},{key:"7",name:"RAMP-CNN: A Novel Neural Network for Enhanced Automotive Radar Object Recognition",short_name:"RAMP-CNN",year:2020,task:["Detection"],dataset:[],conference_journal:"RAD IEEE Sensors",paper_link:"https://ieeexplore.ieee.org/abstract/document/9249018",source_code:"-"},{key:"8",name:"CNN Based Road User Detection Using the 3D Radar Cube",short_name:"-",year:2020,task:["Detection"],dataset:[],conference_journal:"RAD RAL",paper_link:"https://ieeexplore.ieee.org/abstract/document/8962258",source_code:"-"},{key:"9",name:"Graph Convolutional Networks for 3D Object Detection on Radar Data",short_name:"GTR-Net",year:2021,task:["Detection"],dataset:[],conference_journal:"RAD ICCV Workshop",paper_link:"https://openaccess.thecvf.com/content/ICCV2021W/AVVision/html/Meyer_Graph_Convolutional_Networks_for_3D_Object_Detection_on_Radar_Data_ICCVW_2021_paper.html?ref=https://githubhelp.com",source_code:"-"},{key:"10",name:"RADDet: Range-Azimuth-Doppler based Radar Object Detection for Dynamic Road Users",short_name:"RADDet",year:2021,task:["Detection"],dataset:["RADDet"],conference_journal:"RAD CRV",paper_link:"https://openaccess.thecvf.com/content/ICCV2021W/AVVision/html/Meyer_Graph_Convolutional_Networks_for_3D_Object_Detection_on_Radar_Data_ICCVW_2021_paper.html?ref=https://githubhelp.com",source_code:"https://github.com/ZhangAoCanada/RADDet"},{key:"11",name:"DAROD: A Deep Automotive Radar Object Detector on Range-Doppler maps",short_name:"DAROD",year:2022,task:["Detection"],dataset:["CARRADA RADDet"],conference_journal:"RD IV",paper_link:"https://ieeexplore.ieee.org/document/9827281",source_code:"-"},{key:"12",name:"K-Radar: 4D Radar Object Detection for Autonomous Driving in Various Weather Conditions",short_name:"K-Radar",year:2022,task:["Detection"],dataset:["K-Radar"],conference_journal:"RADE NeurIPS",paper_link:"https://proceedings.neurips.cc/paper_files/paper/2022/hash/185fdf627eaae2abab36205dcd19b817-Abstract-Datasets_and_Benchmarks.html",source_code:"https://github.com/kaist-avelab/k-radar"},{key:"13",name:"Enhanced K-Radar: Optimal Density Reduction to Improve Detection Performance and Accessibility of 4D Radar Tensor-based Object Detection",short_name:"Enhanced K-Radar",year:2023,task:["Detection"],dataset:["K-Radar"],conference_journal:"RADE arXiv",paper_link:"https://arxiv.org/abs/2303.06342",source_code:"-"},{key:"14",name:"RSS-Net: Weakly-supervised multi-class semantic segmentation with FMCW radar",short_name:"RSS-Net",year:2020,task:["Segmentation"],dataset:[],conference_journal:"RAD IV",paper_link:"https://ieeexplore.ieee.org/abstract/document/9304674",source_code:"-"},{key:"15",name:"Deep Open Space Segmentation using Automotive Radar",short_name:"-",year:2020,task:["Segmentation"],dataset:[],conference_journal:"RAD 
ICMIM",paper_link:"https://ieeexplore.ieee.org/abstract/document/9299052",source_code:"-"},{key:"16",name:"PolarNet: Accelerated Deep Open Space Segmentation using Automotive Radar in Polar Domain",short_name:"PolarNet",year:2021,task:["Segmentation"],dataset:[],conference_journal:"RAD VEHITS",paper_link:"https://arxiv.org/abs/2103.03387",source_code:"-"},{key:"17",name:"Multi-view Radar Semantic Segmentation",short_name:"-",year:2021,task:["Segmentation"],dataset:[],conference_journal:"RAD ICCV",paper_link:"https://openaccess.thecvf.com/content/ICCV2021/html/Ouaknine_Multi-View_Radar_Semantic_Segmentation_ICCV_2021_paper.html",source_code:"https://github.com/valeoai/MVRSS"},{key:"18",name:"Raw High-Definition Radar for Multi-Task Learning",short_name:"FFT-RadNet",year:2022,task:["Multi-Task"],dataset:["RADIal"],conference_journal:"CVPR",paper_link:"https://openaccess.thecvf.com/content/CVPR2022/html/Rebut_Raw_High-Definition_Radar_for_Multi-Task_Learning_CVPR_2022_paper.html",source_code:"-"},{key:"19",name:"Cross-Modal Supervision-Based Multitask Learning With Automotive Radar Raw Data",short_name:"-",year:2023,task:["Multi-Task"],dataset:["RADIal"],conference_journal:"RD TIV",paper_link:"https://ieeexplore.ieee.org/abstract/document/10008067",source_code:"-"}],f=function(n,a,d,t){console.log("params",n,a,d,t)};return(0,e.jsxs)("div",o()(o()(o()({},i),u.wrapper),{},{id:"methods",children:[(0,e.jsx)("div",{className:"title-wrapper",children:(0,e.jsx)("h2",{name:"title",className:"title-h1",children:"Radar Tensor Methods"})}),(0,e.jsx)(x.Z,{bordered:!0,scroll:{x:"200px"},columns:h,dataSource:g,onChange:f})]}))}}]),r}(k.PureComponent),Ae=je,Me=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=[{title:"Id",dataIndex:"key",width:"10px"},{title:"Name",dataIndex:"name"},{title:"Short Name",dataIndex:"short_name"},{title:"Year",dataIndex:"year",sorter:function(n,a){return n.year-a.year}},{title:"Task",dataIndex:"task",filters:[{text:"Object Detection",value:"Object Detection"},{text:"Semantic Segmentation",value:"Semantic Segmentation"}],onFilter:function(n,a){return a.task.includes(n)},filterSearch:!0,width:"10%",render:function(n,a){var d=n.toString().split("|");console.log(d);var t=[];return d.map(function(c){c=c.trim();var s="";switch(c){case"Detection":s="#1890ff";break;case"Segmentation":s="#fa541c";break;case"Tracking":s="#fa8c16";break;case"Odometry":s="#13c2c2";break;case"Planning":s="#52c41a";break;case"Prediction":s="#f5222d";break;case"Classification":s="#eb2f96";break;default:s="blue-inverse"}t.push((0,e.jsx)(A.Z,{color:s,children:c},c))}),t}},{title:"Dataset",dataIndex:"dataset",filters:[{text:"nuScenes",value:"nuScenes"}],onFilter:function(n,a){return a.dataset.includes(n)},filterSearch:!0,render:function(n,a){var d=n.toString().split("|"),t=[];return d.map(function(c){t.push((0,e.jsxs)("div",{children:[(0,e.jsx)("span",{children:c}),(0,e.jsx)("br",{})]}))}),t}},{title:"Conference/Journal",dataIndex:"conference_journal"},{title:"Link",dataIndex:"source_code",render:function(n,a){return 
console.log(a),a.source_code!=""&&a.source_code!="-"?(0,e.jsxs)("div",{children:[(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"}),"\xA0\xA0",(0,e.jsx)("a",{target:"_blank",href:a.source_code,children:"Code"})]}):(0,e.jsx)("div",{children:(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"})})}}],g=[{key:"1",name:"Comparison of random forest and long short-term memory network performances in classification tasks using radar",short_name:"-",year:2017,task:["Classification"],dataset:[],conference_journal:"SDF",paper_link:"https://ieeexplore.ieee.org/abstract/document/8126350",source_code:"-"},{key:"2",name:"Radar-based Feature Design and Multiclass Classification for Road User Recognition",short_name:"-",year:2018,task:["Classification"],dataset:[],conference_journal:"IV",paper_link:"https://ieeexplore.ieee.org/abstract/document/8500607",source_code:"-"},{key:"3",name:"Off-the-shelf sensor vs. experimental radar - How much resolution is necessary in automotive radar classification?",short_name:"-",year:2020,task:["Classification"],dataset:[],conference_journal:"FUSION",paper_link:"https://ieeexplore.ieee.org/abstract/document/9190338",source_code:"-"},{key:"4",name:"Radar-PointGNN: Graph Based Object Recognition for Unstructured Radar Point-cloud Data",short_name:"Radar-PointGNN",year:2022,task:["Classification"],dataset:[],conference_journal:"RadarConf",paper_link:"https://ieeexplore.ieee.org/abstract/document/9455172",source_code:"-"},{key:"5",name:"2D Car Detection in Radar Data with PointNets",short_name:"-",year:2019,task:["Detection"],dataset:[],conference_journal:"ITSC",paper_link:"https://ieeexplore.ieee.org/abstract/document/8917000",source_code:"-"},{key:"6",name:"Detection and Tracking on Automotive Radar Data with Deep Learning",short_name:"-",year:2020,task:["Detection"],dataset:[],conference_journal:"FUSION",paper_link:"https://ieeexplore.ieee.org/abstract/document/9190261",source_code:"-"},{key:"7",name:"Seeing Around Street Corners: Non-Line-of-Sight Detection and Tracking In-the-Wild Using Doppler Radar",short_name:"-",year:2020,task:["Detection"],dataset:[],conference_journal:"CVPR",paper_link:"https://openaccess.thecvf.com/content_CVPR_2020/html/Scheiner_Seeing_Around_Street_Corners_Non-Line-of-Sight_Detection_and_Tracking_In-the-Wild_Using_CVPR_2020_paper.html",source_code:"-"},{key:"8",name:"RPFA-Net: a 4D RaDAR Pillar Feature Attention Network for 3D Object Detection",short_name:"RPFA-Net",year:2021,task:["Detection"],dataset:[],conference_journal:"ITSC",paper_link:"https://ieeexplore.ieee.org/abstract/document/9564754",source_code:"https://github.com/adept-thu/RPFA-Net"},{key:"9",name:"Comparison of Different Approaches for Identification of Radar Ghost Detections in Automotive Scenarios",short_name:"-",year:2021,task:["Detection"],dataset:[],conference_journal:"RadarConf",paper_link:"https://ieeexplore.ieee.org/document/9454980",source_code:"-"},{key:"10",name:"Contrastive Learning for Automotive mmWave Radar Detection Points Based Instance Segmentation",short_name:"-",year:2022,task:["Detection"],dataset:[],conference_journal:"ITSC",paper_link:"https://ieeexplore.ieee.org/document/9922540",source_code:"-"},{key:"11",name:"3D Object Detection for Multiframe 4-D Automotive Millimeter-Wave Radar Point Cloud",short_name:"-",year:2023,task:["Detection"],dataset:["TJ4DRadSet"],conference_journal:"IEEE Sensors",paper_link:"https://ieeexplore.ieee.org/abstract/document/9944629",source_code:"-"},{key:"12",name:"SMURF: Spatial 
Multi-Representation Fusion for 3D Object Detection with 4D Imaging Radar",short_name:"SMURF",year:2023,task:["Detection"],dataset:["VoD","TJ4DRadSet"],conference_journal:"TIV",paper_link:"https://ieeexplore.ieee.org/abstract/document/10274127",source_code:"-"},{key:"13",name:"MVFAN: Multi-View Feature Assisted Network for 4D Radar Object Detection",short_name:"MVFAN",year:2023,task:["Detection"],dataset:["VoD","Astyx"],conference_journal:"ICONIP",paper_link:"https://arxiv.org/abs/2310.16389",source_code:"-"},{key:"14",name:"Semantic Segmentation on Radar Point Clouds",short_name:"-",year:2018,task:["Segmentation"],dataset:[],conference_journal:"FUSION",paper_link:"https://ieeexplore.ieee.org/document/8455344",source_code:"-"},{key:"15",name:"Supervised Clustering for Radar Applications: On the Way to Radar Instance Segmentation",short_name:"-",year:2018,task:["Segmentation"],dataset:[],conference_journal:"ICMIM",paper_link:"https://ieeexplore.ieee.org/abstract/document/8917000",source_code:"-"},{key:"16",name:"2D Car Detection in Radar Data with PointNets",short_name:"-",year:2019,task:["Segmentation"],dataset:[],conference_journal:"ITSC",paper_link:"https://ieeexplore.ieee.org/abstract/document/8917000",source_code:"-"},{key:"17",name:"RSS-Net: Weakly-Supervised Multi-Class Semantic Segmentation with FMCW Radar",short_name:"RSS-Net",year:2020,task:["Segmentation"],dataset:[],conference_journal:"IV",paper_link:"https://ieeexplore.ieee.org/abstract/document/9304674",source_code:"-"},{key:"18",name:"Panoptic Segmentation for Automotive Radar Point Cloud",short_name:"-",year:2022,task:["Segmentation"],dataset:[],conference_journal:"RadarConf",paper_link:"https://ieeexplore.ieee.org/document/9764218",source_code:"-"},{key:"19",name:"Deep Instance Segmentation With Automotive Radar Detection Points",short_name:"-",year:2022,task:["Segmentation"],dataset:["RadarScenes"],conference_journal:"TIV",paper_link:"https://ieeexplore.ieee.org/abstract/document/9762032",source_code:"-"},{key:"20",name:"Detection and Tracking on Automotive Radar Data with Deep Learning",short_name:"-",year:2020,task:["Tracking"],dataset:[],conference_journal:"FUSION",paper_link:"https://ieeexplore.ieee.org/abstract/document/9190261",source_code:"-"},{key:"21",name:"Which Framework is Suitable for Online 3D Multi-Object Tracking for Autonomous Driving with Automotive 4D Imaging Radar?",short_name:"-",year:2023,task:["Tracking"],dataset:[],conference_journal:"arXiv",paper_link:"https://arxiv.org/abs/2309.06036",source_code:"-"},{key:"22",name:"Efficient Deep-Learning 4D Automotive Radar Odometry Method",short_name:"-",year:2023,task:["Odometry"],dataset:[],conference_journal:"TIV",paper_link:"https://ieeexplore.ieee.org/abstract/document/10237296",source_code:"-"},{key:"23",name:"DRIO: Robust Radar-Inertial Odometry in Dynamic Environments",short_name:"DRIO",year:2023,task:["Odometry"],dataset:[],conference_journal:"RAL",paper_link:"https://ieeexplore.ieee.org/abstract/document/10207713",source_code:"-"},{key:"24",name:"Person Reidentification Based on Automotive Radar Point Clouds",short_name:"-",year:2021,task:["Gait Recognition"],dataset:[],conference_journal:"TGRS",paper_link:"https://ieeexplore.ieee.org/document/9420713",source_code:"-"},{key:"25",name:"Gait Recognition for Co-Existing Multiple People Using Millimeter Wave Sensing",short_name:"-",year:2020,task:["Gait 
Recognition"],dataset:[],conference_journal:"AAAI",paper_link:"https://ojs.aaai.org/index.php/AAAI/article/view/5430",source_code:"-"}],f=function(n,a,d,t){console.log("params",n,a,d,t)};return(0,e.jsxs)("div",o()(o()(o()({},i),u.wrapper),{},{id:"methods",children:[(0,e.jsx)("div",{className:"title-wrapper",children:(0,e.jsx)("h2",{name:"title",className:"title-h1",children:"Point Cloud Methods"})}),(0,e.jsx)(x.Z,{bordered:!0,scroll:{x:"200px"},columns:h,dataSource:g,onChange:f})]}))}}]),r}(k.PureComponent),Ie=Me,Ne=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=[{title:"Id",dataIndex:"key",width:"10px"},{title:"Name",dataIndex:"name"},{title:"Short Name",dataIndex:"short_name"},{title:"Year",dataIndex:"year",sorter:function(n,a){return n.year-a.year}},{title:"Task",dataIndex:"task",filters:[{text:"Object Detection",value:"Object Detection"},{text:"Semantic Segmentation",value:"Semantic Segmentation"}],onFilter:function(n,a){return a.task.includes(n)},filterSearch:!0,width:"10%",render:function(n,a){var d=n.toString().split("|");console.log(d);var t=[];return d.map(function(c){c=c.trim();var s="";switch(c){case"Detection":s="#1890ff";break;case"Segmentation":s="#fa541c";break;case"Tracking":s="#fa8c16";break;case"Localization":s="#13c2c2";break;case"Planning":s="#52c41a";break;case"Prediction":s="#f5222d";break;case"Object Classification":s="#eb2f96";break;default:s="blue-inverse"}t.push((0,e.jsx)(A.Z,{color:s,children:c},c))}),t}},{title:"Dataset",dataIndex:"dataset",filters:[{text:"nuScenes",value:"nuScenes"}],onFilter:function(n,a){return a.dataset.includes(n)},filterSearch:!0,render:function(n,a){var d=n.toString().split("|"),t=[];return d.map(function(c){t.push((0,e.jsxs)("div",{children:[(0,e.jsx)("span",{children:c}),(0,e.jsx)("br",{})]}))}),t}},{title:"Conference/Journal",dataIndex:"conference_journal"},{title:"Link",dataIndex:"source_code",render:function(n,a){return console.log(a),a.source_code!=""&&a.source_code!="-"?(0,e.jsxs)("div",{children:[(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"}),"\xA0\xA0",(0,e.jsx)("a",{target:"_blank",href:a.source_code,children:"Code"})]}):(0,e.jsx)("div",{children:(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"})})}}],g=[{key:"1",name:"Automotive Radar Gridmap Representations",short_name:"-",year:2015,task:["Detection"],dataset:[],conference_journal:"ICMIM",paper_link:"https://ieeexplore.ieee.org/abstract/document/7117922",source_code:"-"},{key:"2",name:"Detection of Arbitrarily Rotated Parked Cars Based on Radar Sensors",short_name:"-",year:2015,task:["Detection"],dataset:[],conference_journal:"IRS",paper_link:"https://ieeexplore.ieee.org/abstract/document/7226281",source_code:"-"},{key:"3",name:"3D Occupancy Grid Mapping Using Statistical Radar Models",short_name:"-",year:2016,task:["Detection"],dataset:[],conference_journal:"IV",paper_link:"https://ieeexplore.ieee.org/abstract/document/7535495",source_code:"-"},{key:"4",name:"Semantic Radar Grids",short_name:"-",year:2017,task:["Detection"],dataset:[],conference_journal:"IV",paper_link:"https://ieeexplore.ieee.org/abstract/document/7995871",source_code:"-"},{key:"5",name:"Adaptions for Automotive Radar Based Occupancy 
Gridmaps",short_name:"-",year:2018,task:["Detection"],dataset:[],conference_journal:"ICMIM",paper_link:"https://ieeexplore.ieee.org/abstract/document/8443484",source_code:"-"},{key:"6",name:"High Resolution Radar-based Occupancy Grid Mapping and Free Space Detection",short_name:"-",year:2018,task:["Detection"],dataset:[],conference_journal:"VEHITS",paper_link:"https://pdfs.semanticscholar.org/d888/6334e15acebe688f993f45da7ba7bde79eff.pdf",source_code:"-"},{key:"7",name:"Semantic Segmentation on Automotive Radar Maps",short_name:"-",year:2019,task:["Detection"],dataset:[],conference_journal:"IV",paper_link:"https://ieeexplore.ieee.org/abstract/document/8813808",source_code:"-"},{key:"8",name:"Occupancy Grids Generation Using Deep Radar Network for Autonomous Driving",short_name:"-",year:2019,task:["Detection"],dataset:[],conference_journal:"ITSC",paper_link:"https://ieeexplore.ieee.org/abstract/document/8916897",source_code:"-"},{key:"9",name:"Semantic Segmentation on 3D Occupancy Grids for Automotive Radar",short_name:"-",year:2020,task:["Detection"],dataset:[],conference_journal:"IEEE ACCESS",paper_link:"https://ieeexplore.ieee.org/abstract/document/9229096",source_code:"-"},{key:"10",name:"NVRadarNet: Real-Time Radar Obstacle and Free Space Detection for Autonomous Driving",short_name:"NVRadarNet",year:2023,task:["Detection"],dataset:[],conference_journal:"RA ICRA",paper_link:"https://arxiv.org/abs/2209.14499",source_code:"-"},{key:"11",name:"Road Scene Understanding by Occupancy Grid Learning from Sparse Radar Clusters using Semantic Segmentation",short_name:"-",year:2019,task:["Segmentation"],dataset:[],conference_journal:"ICCV",paper_link:"https://openaccess.thecvf.com/content_ICCVW_2019/html/CVRSUAD/Sless_Road_Scene_Understanding_by_Occupancy_Grid_Learning_from_Sparse_Radar_ICCVW_2019_paper.html",source_code:"-"},{key:"12",name:"CNN based road course estimation on automotive radar data with various gridmaps",short_name:"-",year:2020,task:["Segmentation"],dataset:[],conference_journal:"ICMIM",paper_link:"https://ieeexplore.ieee.org/document/9299086",source_code:"-"},{key:"13",name:"Scene Understanding With Automotive Radar",short_name:"-",year:2020,task:["Segmentation"],dataset:[],conference_journal:"TIV",paper_link:"https://ieeexplore.ieee.org/abstract/document/8911477",source_code:"-"},{key:"14",name:"Semantic Segmentation-Based Occupancy Grid Map Learning With Automotive Radar Raw Data",short_name:"-",year:2023,task:["Segmentation"],dataset:["RADIal"],conference_journal:"TIV",paper_link:"https://ieeexplore.ieee.org/abstract/document/10273590",source_code:"-"}],f=function(n,a,d,t){console.log("params",n,a,d,t)};return(0,e.jsxs)("div",o()(o()(o()({},i),u.wrapper),{},{id:"methods",children:[(0,e.jsx)("div",{className:"title-wrapper",children:(0,e.jsx)("h2",{name:"title",className:"title-h1",children:"Grid Map Methods"})}),(0,e.jsx)(x.Z,{bordered:!0,scroll:{x:"200px"},columns:h,dataSource:g,onChange:f})]}))}}]),r}(k.PureComponent),Pe=Ne,we=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=[{title:"Id",dataIndex:"key",width:"10px"},{title:"Name",dataIndex:"name"},{title:"Short Name",dataIndex:"short_name"},{title:"Year",dataIndex:"year",sorter:function(n,a){return n.year-a.year}},{title:"Task",dataIndex:"task",filters:[{text:"Object Detection",value:"Object Detection"},{text:"Semantic 
Segmentation",value:"Semantic Segmentation"}],onFilter:function(n,a){return a.task.includes(n)},filterSearch:!0,width:"10%",render:function(n,a){var d=n.toString().split("|");console.log(d);var t=[];return d.map(function(c){c=c.trim();var s="";switch(c){case"Object Detection":s="#1890ff";break;case"Semantic Segmentation":s="#fa541c";break;case"Object Tracking":s="#fa8c16";break;case"Localization":s="#13c2c2";break;case"Planning":s="#52c41a";break;case"Prediction":s="#f5222d";break;case"Object Classification":s="#eb2f96";break;default:s="#722ed1"}t.push((0,e.jsx)(A.Z,{color:s,children:c},c))}),t}},{title:"Dataset",dataIndex:"dataset",filters:[{text:"nuScenes",value:"nuScenes"}],onFilter:function(n,a){return a.dataset.includes(n)},filterSearch:!0,render:function(n,a){var d=n.toString().split("|"),t=[];return d.map(function(c){t.push((0,e.jsxs)("div",{children:[(0,e.jsx)("span",{children:c}),(0,e.jsx)("br",{})]}))}),t}},{title:"Conference/Journal",dataIndex:"conference_journal"},{title:"Link",dataIndex:"source_code",render:function(n,a){return console.log(a),a.source_code!=""&&a.source_code!="-"?(0,e.jsxs)("div",{children:[(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"}),"\xA0\xA0",(0,e.jsx)("a",{target:"_blank",href:a.source_code,children:"Code"})]}):(0,e.jsx)("div",{children:(0,e.jsx)("a",{target:"_blank",href:a.paper_link,children:"Paper"})})}}],g=[{key:"1",name:"Human Detection and Activity Classification Based on Micro-Doppler Signatures Using Deep Convolutional Neural Networks",short_name:"-",year:2016,task:["Motion (Gait/Gestures/Activity) Classification"],dataset:[],conference_journal:"IEEE Geoscience and Remote Sensing Letters",paper_link:"https://ieeexplore.ieee.org/abstract/document/7314905",source_code:"-"},{key:"2",name:"New Analysis of Radar Micro-Doppler Gait Signatures for Rehabilitation and Assisted Living",short_name:"-",year:2017,task:["Motion (Gait/Gestures/Activity) Classification"],dataset:[],conference_journal:"ICASSP",paper_link:"https://ieeexplore.ieee.org/abstract/document/7952908",source_code:"-"},{key:"3",name:"Human Motion Classification with Micro-Doppler Radar and Bayesian-Optimized Convolutional Neural Networks",short_name:"-",year:2018,task:["Motion (Gait/Gestures/Activity) Classification"],dataset:[],conference_journal:"ICASSP",paper_link:"https://ieeexplore.ieee.org/abstract/document/8461847",source_code:"-"},{key:"4",name:"Radar-Based Human-Motion Recognition With Deep Learning: Promising Applications for Indoor Monitoring",short_name:"-",year:2018,task:["Motion (Gait/Gestures/Activity) Classification"],dataset:[],conference_journal:"IEEE Signal Processing Magazine",paper_link:"https://ieeexplore.ieee.org/abstract/document/8746862",source_code:"-"},{key:"5",name:"Radar-Based Human Gait Recognition Using Dual-Channel Deep Convolutional Neural Network",short_name:"-",year:2019,task:["Motion (Gait/Gestures/Activity) Classification"],dataset:[],conference_journal:"TGRS",paper_link:"https://ieeexplore.ieee.org/abstract/document/8789686",source_code:"-"},{key:"6",name:"Experiments with mmWave Automotive Radar Test-bed",short_name:"-",year:2019,task:["Motion (Gait/Gestures/Activity) Classification"],dataset:[],conference_journal:"ACSSC",paper_link:"https://ieeexplore.ieee.org/abstract/document/9048939",source_code:"-"},{key:"7",name:"Attention-Based Dual-Stream Vision Transformer for Radar Gait Recognition",short_name:"-",year:2022,task:["Motion (Gait/Gestures/Activity) 
Classification"],dataset:[],conference_journal:"ICASSP",paper_link:"https://ieeexplore.ieee.org/abstract/document/9746565",source_code:"-"}],f=function(n,a,d,t){console.log("params",n,a,d,t)};return(0,e.jsxs)("div",o()(o()(o()({},i),u.wrapper),{},{id:"methods",children:[(0,e.jsx)("div",{className:"title-wrapper",children:(0,e.jsx)("h2",{name:"title",className:"title-h1",children:"Micro-Doppler Signature Methods"})}),(0,e.jsx)(x.Z,{bordered:!0,scroll:{x:"200px"},columns:h,dataSource:g,onChange:f})]}))}}]),r}(k.PureComponent),Oe=we,Te=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;delete i.dataSource,delete i.isMobile;var h=["S. Chadwick, W. Maddetn, and P. Newman, \u201CDistant vehicle detection using radar and vision,\u201D Proceedings - IEEE International Conference on Robotics and Automation, vol. 2019-May, pp. 8311\u20148317, 2019.","M. Meyer and G. Kuschk, \u201CAstyx: Automotive radar dataset for deep learning based 3D object detection,\u201D EuRAD 2019 - 2019 16th European Radar Conference, pp. 129\u2014132, 2019.","M. Bijelic, T. Gruber, F. Mannan, F. Kraus, W. Ritter, K. Dietmayer, and F. Heide, \u201CSeeing Through Fog Without Seeing Fog: Deep Multimodal Sensor Fusion in Unseen Adverse Weather,\u201D 2020 IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR), vol. 00, pp. 11 679\u201311 689, 2020.","A. Ouaknine, A. Newson, J. Rebut, F. Tupin, and P. Perez, \u201CCARRADA dataset: Camera and automotive radar with range-Angle-doppler annotations,\u201D arXiv, 2020.","J. Guan, S. Madani, S. Jog, S. Gupta, and H. Hassanieh, \u201CThrough Fog High-Resolution Imaging Using Millimeter Wave Radar,\u201D ser. Proceedings of the IEEE Computer Society Conference on Computer Vision and Pattern Recognition, 2020, pp. 11 461\u201411 470.","M. Mostajabi, C. M. Wang, D. Ranjan, and G. Hsyu, \u201CHigh resolution radar dataset for semi-supervised learning of dynamic objects,\u201D IEEE Computer Society Conference on Computer Vision and Pattern Recognition Workshops, vol. 2020-June, pp. 450\u2014457, 2020.","M. Sheeny, E. De Pellegrin, S. Mukherjee, A. Ahrabian, S. Wang, and A. Wallace, \u201CRadiate: A radar dataset for automotive perception in bad weather,\u201D in 2021 IEEE International Conference on Robotics and Automation (ICRA). IEEE, 2021, pp. 1\u20137.","X. Weng, Y. Man, D. Cheng, J. Park, M. O\u2019Toole, and K. Kitani, \u201CAll-In-One Drive: A Large-Scale Comprehensive Perception Dataset with High-Density Long-Range Point Clouds.\u201D","Y. Wang, G. Wang, H.-M. Hsu, H. Liu, and J.-N. Hwang, \u201CRethinking of Radar\u2019s Role: A Camera-Radar Dataset and Systematic Annotator via Coordinate Alignment,\u201D in CVPRW, 2021.","T.-Y. Lim, S. A. Markowitz, and M. N. Do, \u201CRaDICaL: A Synchronized FMCW Radar, Depth, IMU and RGB Camera Data Dataset with Low-Level FMCW Radar Signals.\u201D","O. Schumann, M. Hahn, N. Scheiner, F. Weishaupt, J. F. Tilly, J. Dickmann, and C. Wohler, \u201CRadarScenes: A Real-World Radar Point Cloud Data Set for Automotive Applications,\u201D 2021. [Online]. Available: http://arxiv.org/abs/2104.02493","A. Zhang, F. E. Nowruzi, and R. Laganiere, \u201CRADDet: Range-Azimuth-Doppler based Radar Object Detection for Dynamic Road Users,\u201D 2021 18th Conference on Robots and Vision (CRV), vol. 00, pp. 95\u2013102, 2021.","Y. Cheng, J. Zhu, M. Jiang, J. Fu, C. Pang1, P. Wang1, K. Sankaran3, O. 
Onabola, Y. Liu, D. Liu, and Y. Bengio, \u201CFloW: A Dataset and Benchmark for Floating Waste Detection in Inland Waters,\u201D in ICCV, 2021.","J. Rebut, A. Ouaknine, W. Malik, and P. P\u00E9rez, \u201CRaw high-definition radar for multi-task learning,\u201D in Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition, 2022, pp. 17021\u201317030.","A. Palffy, E. Pool, S. Baratam, J. Kooij, and D. Gavrila, \u201CMulti-class Road User Detection with 3+1D Radar in the View-of-Delft Dataset,\u201D IEEE Robotics and Automation Letters, vol. PP, no. 99, pp. 1\u20131, 2022.","K. Burnett, D. J. Yoon, Y. Wu, A. Z. Li, H. Zhang, S. Lu, J. Qian, W.-K. Tseng, A. Lambert, K. Y. K. Leung, A. P. Schoellig, and T. D. Barfoot, \u201CBoreas: A Multi-Season Autonomous Driving Dataset,\u201D arXiv, 2022.","D.-H. Paek, S.-H. Kong, and K. T. Wijaya, \u201CK-Radar: 4D Radar Object Detection Dataset and Benchmark for Autonomous Driving in Various Weather Conditions,\u201D arXiv, 2022.","T. Matuszka, I. Barton, \u00C1. Butykai, P. Hajas, D. Kiss, D. Kov\u00E1cs, S. Kuns\u00E1gi-M\u00E1t\u00E9, P. Lengyel, G. N\u00E9meth, L. Pet\u0151 et al., \u201CaiMotive dataset: A multimodal dataset for robust autonomous driving with long-range perception,\u201D arXiv preprint arXiv:2211.09445, 2022.","S. Yao, R. Guan, Z. Wu et al., \u201CWaterScenes: A multi-task 4D radar-camera fusion dataset and benchmark for autonomous driving on water surfaces,\u201D arXiv preprint arXiv:2307.06505, 2023."],g=["S. Chadwick, W. Maddern, and P. Newman, \u201CDistant vehicle detection using radar and vision,\u201D in 2019 International Conference on Robotics and Automation (ICRA). IEEE, 2019, pp. 8311\u20138317.","R. Nabati and H. Qi, \u201CRRPN: Radar region proposal network for object detection in autonomous vehicles,\u201D in 2019 IEEE International Conference on Image Processing (ICIP). IEEE, 2019, pp. 3093\u20133097.","H. Jha, V. Lodhi, and D. Chakravarty, \u201CObject Detection and Identification Using Vision and Radar Data Fusion System for Ground-Based Navigation,\u201D 2019 6th International Conference on Signal Processing and Integrated Networks (SPIN), vol. 00, pp. 590\u2013593, 2019.","V. Lekic and Z. Babic, \u201CAutomotive radar and camera fusion using generative adversarial networks,\u201D Computer Vision and Image Understanding, vol. 184, pp. 1\u20138, 2019.","M. Meyer and G. Kuschk, \u201CDeep learning based 3D object detection for automotive radar and camera,\u201D in 2019 16th European Radar Conference (EuRAD). IEEE, 2019, pp. 133\u2013136.","V. John and S. Mita, \u201CRVNet: Deep sensor fusion of monocular camera and radar for image-based obstacle detection in challenging environments,\u201D in Image and Video Technology: 9th Pacific-Rim Symposium, PSIVT 2019, Sydney, NSW, Australia, November 18\u201322, 2019, Proceedings 9. Springer, 2019, pp. 351\u2013364.","T.-Y. Lim, A. Ansari, B. Major, D. Fontijne, M. Hamilton, R. Gowaikar, and S. Subramanian, \u201CRadar and camera early fusion for vehicle detection in advanced driver assistance systems,\u201D in Machine learning for autonomous driving workshop at the 33rd conference on neural information processing systems, vol. 2, 2019, p. 7.","V. John, M. K. Nithilan, S. Mita, H. Tehrani, R. S. Sudheesh, and P. P. 
Lalu, \u201CSO-Net: Joint Semantic Segmentation and Obstacle Detection Using Deep Fusion of Monocular Camera and Radar,\u201D Lecture Notes in Computer Science, pp. 138\u2013148, 2020.","S. Chang, Y. Zhang, F. Zhang, X. Zhao, S. Huang, Z. Feng, and Z. Wei, \u201CSpatial attention fusion for obstacle detection using mmWave radar and vision sensor,\u201D Sensors, vol. 20, no. 4, p. 956, 2020.","F. Nobis, M. Geisslinger, M. Weber, J. Betz, and M. Lienkamp, \u201CA deep learning-based radar and camera sensor fusion architecture for object detection,\u201D arXiv, 2020.","M. Bijelic, T. Gruber, F. Mannan, F. Kraus, W. Ritter, K. Dietmayer, and F. Heide, \u201CSeeing through fog without seeing fog: Deep multimodal sensor fusion in unseen adverse weather,\u201D in Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition, 2020, pp. 11682\u201311692.","R. Yadav, A. Vierling, and K. Berns, \u201CRadar+RGB attentive fusion for robust object detection in autonomous vehicles,\u201D arXiv preprint arXiv:2008.13642, 2020.","R. Nabati and H. Qi, \u201CRadar-camera sensor fusion for joint object detection and distance estimation in autonomous vehicles,\u201D arXiv preprint arXiv:2009.08428, 2020.","K. Kowol, M. Rottmann, S. Bracke, and H. Gottschalk, \u201CYOdar: Uncertainty-based sensor fusion for vehicle detection with camera and radar sensors,\u201D arXiv preprint arXiv:2010.03320, 2020.","Y. Wang, Z. Jiang, X. Gao, J.-N. Hwang, G. Xing, and H. Liu, \u201CRODNet: Radar Object Detection using Cross-Modal Supervision,\u201D 2021 IEEE Winter Conference on Applications of Computer Vision (WACV), vol. 00, pp. 504\u2013513, 2021.","X. Gao, G. Xing, S. Roy, and H. Liu, \u201CRAMP-CNN: A Novel Neural Network for Enhanced Automotive Radar Object Recognition,\u201D IEEE Sensors Journal, vol. 21, no. 4, pp. 5119\u20135132, 2021.","L.-q. Li and Y.-l. Xie, \u201CA Feature Pyramid Fusion Detection Algorithm Based on Radar and Camera Sensor,\u201D 2020 15th IEEE International Conference on Signal Processing (ICSP), vol. 1, pp. 366\u2013370, 2020.","J. Kim, Y. Kim, and D. Kum, \u201CLow-level Sensor Fusion for 3D Vehicle Detection using Radar Range-Azimuth Heatmap and Monocular Image,\u201D Lecture Notes in Computer Science, pp. 388\u2013402, 2021.","X. Dong, B. Zhuang, Y. Mao, and L. Liu, \u201CRadar Camera Fusion via Representation Learning in Autonomous Driving,\u201D 2021 IEEE/CVF Conference on Computer Vision and Pattern Recognition Workshops (CVPRW), vol. 00, pp. 1672\u20131681, 2021.","F. Nobis, E. Shafiei, P. Karle, J. Betz, and M. Lienkamp, \u201CRadar Voxel Fusion for 3D Object Detection,\u201D Applied Sciences, vol. 11, no. 12, p. 5598, 2021.","H. Cui, J. Wu, J. Zhang, G. Chowdhary, and W. R. Norris, \u201C3D Detection and Tracking for On-road Vehicles with a Monovision Camera and Dual Low-cost 4D mmWave Radars,\u201D 2021 IEEE International Intelligent Transportation Systems Conference (ITSC), vol. 00, pp. 2931\u20132937, 2021.","Y. Cheng, H. Xu, and Y. Liu, \u201CRobust Small Object Detection on the Water Surface through Fusion of Camera and Millimeter Wave Radar,\u201D in ICCV, 2021.","Y. Kim, J. W. Choi, and D. Kum, \u201CGRIF Net: Gated Region of Interest Fusion Network for Robust 3D Object Detection from Radar Point Cloud and Monocular Image,\u201D 2020 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS), vol. 00, pp. 10857\u201310864, 2021.","L. St\u00E4cker, P. Heidenreich, J. Rambach, and D. 
Stricker, \u201CFusion Point Pruning for Optimized 2D Object Detection with Radar-Camera Fusion,\u201D 2022 IEEE/CVF Winter Conference on Applications of Computer Vision (WACV), vol. 00, pp. 1275\u20131282, 2022.","A. W. Harley, Z. Fang, J. Li, R. Ambrus, and K. Fragkiadaki, \u201CA Simple Baseline for BEV Perception Without LiDAR,\u201D arXiv, 2022.","K. Bansal, K. Rungta, and D. Bharadia, \u201CRadSegNet: A Reliable Approach to Radar Camera Fusion,\u201D arXiv, 2022.","T. Zhou, J. Chen, Y. Shi, K. Jiang, M. Yang, and D. Yang, \u201CBridging the view disparity between radar and camera features for multi-modal fusion 3D object detection,\u201D IEEE Transactions on Intelligent Vehicles, vol. 8, no. 2, pp. 1523\u20131535, 2023.","Y. Kim, S. Kim, J. W. Choi, and D. Kum, \u201CCRAFT: Camera-Radar 3D Object Detection with Spatio-Contextual Fusion Transformer,\u201D arXiv, 2022.","F. Drews, D. Feng, F. Faion, L. Rosenbaum, M. Ulrich, and C. Gl\u00E4ser, \u201CDeepFusion: A robust and modular 3D object detector for lidars, cameras and radars,\u201D in 2022 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS). IEEE, 2022, pp. 560\u2013567.","J.-J. Hwang, H. Kretzschmar, J. Manela, S. Rafferty, N. Armstrong-Crews, T. Chen, and D. Anguelov, \u201CCramNet: Camera-radar fusion with ray-constrained cross-attention for robust 3D object detection,\u201D in Computer Vision\u2013ECCV 2022: 17th European Conference, Tel Aviv, Israel, October 23\u201327, 2022, Proceedings, Part XXXVIII. Springer, 2022, pp. 388\u2013405.","Z. Wu, G. Chen, Y. Gan, L. Wang, and J. Pu, \u201CMVFusion: Multi-view 3D object detection with semantic-aligned radar and camera fusion,\u201D arXiv preprint arXiv:2302.10511, 2023.","Y. Kim, S. Kim, J. Shin, J. W. Choi, and D. Kum, \u201CCRN: Camera radar net for accurate, robust, efficient 3D perception,\u201D arXiv preprint arXiv:2304.00670, 2023.","L. Zheng, S. Li, B. Tan et al., \u201CRCFusion: Fusing 4D Radar and Camera with Bird\u2019s-Eye View Features for 3D Object Detection,\u201D IEEE Transactions on Instrumentation and Measurement, 2023.","W. Xiong, J. Liu, T. Huang et al., \u201CLXL: LiDAR Excluded Lean 3D Object Detection with 4D Imaging Radar and Camera Fusion,\u201D arXiv preprint arXiv:2307.00724, 2023.","R. Guan, S. Yao, X. Zhu et al., \u201CAchelous: A Fast Unified Water-surface Panoptic Perception Framework based on Fusion of Monocular Camera and 4D mmWave Radar,\u201D 
arXiv preprint arXiv:2307.07102, 2023."],f=h.map(function(n,a){return(0,e.jsxs)("p",{children:["[",a+1,"] ",n]})}),l=g.map(function(n,a){return(0,e.jsxs)("p",{children:["[",a+1+h.length,"] ",n]})});return(0,e.jsx)("div",{className:"home-page-wrapper content12-wrapper",id:"references",children:(0,e.jsxs)("div",{className:"content12",id:"citation",children:[(0,e.jsx)("h1",{name:"title",className:"title-h1",children:"Citation"}),(0,e.jsx)("div",{style:{backgroundColor:"#f3f6fa",padding:"10px"},children:(0,e.jsxs)("code",{children:["@misc{yao2023radarperception,",(0,e.jsx)("br",{}),"\xA0\xA0\xA0\xA0","title={Radar Perception in Autonomous Driving: Exploring Different Data Representations}, ",(0,e.jsx)("br",{}),"\xA0\xA0\xA0\xA0","author={Shanliang Yao and Runwei Guan and Zitian Peng and Chenhang Xu and Yilu Shi and Yong Yue and Eng Gee Lim and Hyungjoon Seo and Ka Lok Man and Xiaohui Zhu and Yutao Yue},",(0,e.jsx)("br",{}),"\xA0\xA0\xA0\xA0","year={2023},",(0,e.jsx)("br",{}),"\xA0\xA0\xA0\xA0","eprint={2312.04861},",(0,e.jsx)("br",{}),"\xA0\xA0\xA0\xA0","archivePrefix={arXiv},",(0,e.jsx)("br",{}),"\xA0\xA0\xA0\xA0","primaryClass={cs.CV}",(0,e.jsx)("br",{}),"}"]})})]})})}}]),r}(k.PureComponent),Fe=Te,Ee=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){var i=Object.assign({},(D()(this.props),this.props)),u=i.dataSource;return delete i.dataSource,delete i.isMobile,(0,e.jsx)(L(),o()(o()(o()({},i),u.OverPack),{},{children:(0,e.jsx)(W.Z,o()(o()({type:"bottom",leaveReverse:!0,delay:[0,100]},u.titleWrapper),{},{children:u.titleWrapper.children.map(B)}),"page")}))}}]),r}(k.PureComponent),Ve=Ee,We=function(v){S()(r,v);var m=C()(r);function r(){return y()(this,r),m.apply(this,arguments)}return R()(r,[{key:"render",value:function(){(function(){var g=document.createElement("script");g.src="https://hm.baidu.com/hm.js?58d144a733fcb2ea441a68157d15c700";var f=document.getElementsByTagName("script")[0];f.parentNode.insertBefore(g,f)})();var u=Object.assign({},(D()(this.props),this.props)),h=u.dataSource;return delete u.dataSource,delete u.isMobile,(0,e.jsx)("div",o()(o()(o()({},u),h.wrapper),{},{children:(0,e.jsx)(L(),o()(o()({},h.OverPack),{},{children:(0,e.jsx)(T.ZP,o()(o()({animation:{y:"+=30",opacity:0,type:"from"}},h.copyright),{},{children:h.copyright.children}),"footer")}))}))}}]),r}(k.PureComponent),Le=We,Ge=p.p+"static/logo.0bb42fca.png",Be={isScrollLink:!0,wrapper:{className:"header2 home-page-wrapper jrhtw9ph4a-editor_css"},page:{className:"home-page"},logo:{className:"header2-logo",children:Ge},LinkMenu:{className:"header2-menu",children:[{name:"linkNav",to:"applications",children:"Applications",className:"menu-item"},{name:"linkNav",to:"datasets",children:"Datasets",className:"menu-item"},{name:"linkNav",to:"methods",children:"Methods",className:"menu-item"},{name:"linkNav",to:"citation",children:"Citation",className:"menu-item"},{name:"linkNav",to:"https://radar-camera-fusion.github.io",children:"Radar Camera Fusion",className:"menu-item"}]},mobileMenu:{className:"header2-mobile-menu"},Menu:{children:[{name:"Banner3_0",to:"Banner3_0",children:"Home",className:"active 
menu-item"},{name:"Content8_0",to:"Content8_0",children:"\u7279\u9080\u5609\u5BBE",className:"menu-item"},{name:"Content9_0",to:"Content9_0",children:"\u4F1A\u8BAE\u65E5\u7A0B",className:"menu-item"},{name:"Content10_0",to:"Content10_0",children:"\u5927\u4F1A\u5730\u5740",className:"menu-item"},{name:"Content11_0",to:"Content11_0",children:"\u5C55\u53F0\u5C55\u793A",className:"menu-item"},{name:"Content12_0",to:"Content12_0",children:"\u7279\u522B\u9E23\u8C22",className:"menu-item"}]}},Ue={wrapper:{className:"banner3"},textWrapper:{className:"banner3-text-wrapper",children:[{name:"slogan",className:"banner3-slogan",children:"Radar Perception in Autonomous Driving: Exploring Different Data Representations"},{name:"nameEn",className:"banner3-name-en",children:"Shanliang Yao, Runwei Guan, Zitian Peng, Chenhang Xu, Yilu Shi, "},{name:"nameEn",className:"banner3-name-en",children:"Yong Yue, Eng Gee Lim, Hyungjoon Seo, Ka Lok Man, Xiaohui Zhu, Yutao Yue"},{name:"time",className:"banner3-time",children:"University of Liverpool, Xi\u2018an Jiaotong-Liverpool University, Institute of Deep Perception Technology, JITRI"},{name:"button",className:"banner3-button",children:"GitHub: https://github.com/Radar-Camera-Fusion/Awesome-Radar-Perception",type:"primary",href:"https://github.com/Radar-Camera-Fusion/Awesome-Radar-Perception",target:"_blank"}]}},He={OverPack:{className:"home-page-wrapper content13-wrapper",playScale:.3},titleWrapper:{className:"title-wrapper",children:[]}},Je={wrapper:{className:"home-page-wrapper footer0-wrapper"},OverPack:{className:"home-page footer0",playScale:.01},copyright:{className:"copyright",children:(0,e.jsxs)("span",{children:["\xA92023 ",(0,e.jsx)("a",{href:"https://github.com/Radar-Camera-Fusion",children:"Radar-Camera-Fusion"})," All Rights Reserved"]})}},H={wrapper:{className:"home-page-wrapper"},OverPack:{className:"home-page",playScale:.05},copyright:{className:"copyright",children:(0,e.jsxs)("span",{children:["\xA92018 ",(0,e.jsx)("a",{href:"https://motion.ant.design",children:"Ant Motion"})," All Rights Reserved"]})}},N={wrapper:{className:"home-page-wrapper"},OverPack:{className:"home-page",playScale:.05},copyright:{className:"copyright",children:(0,e.jsxs)("span",{children:["\xA92018 ",(0,e.jsx)("a",{href:"https://motion.ant.design",children:"Ant Motion"})," All Rights Reserved"]})}},ze={wrapper:{className:"home-page-wrapper content12-wrapper"},OverPack:{className:"home-page content12",playScale:.05}},ta={wrapper:{className:"home-page-wrapper pricing2-wrapper"},page:{className:"home-page pricing2"},OverPack:{playScale:.3,className:"pricing2-content-wrapper"},titleWrapper:{className:"pricing2-title-wrapper",children:[{name:"title",children:"Comparison of Different Sensors",className:"pricing2-title-h1"}]},Table:{name:"tabsTitle",size:"default",className:"pricing2-table",columns:{children:[{dataIndex:"name",key:"name",name:"empty",childWrapper:{children:[{name:"name",children:" "},{name:"content",children:" 
"}]}},{dataIndex:"free",key:"free",name:"free",childWrapper:{className:"pricing2-table-name-block",children:[{name:"name",className:"pricing2-table-name",children:(0,e.jsx)("span",{children:(0,e.jsxs)("p",{children:[(0,e.jsx)("span",{children:"Camera"}),(0,e.jsx)("br",{})]})})}]}},{dataIndex:"basic",key:"basic",name:"basic",childWrapper:{className:"pricing2-table-name-block",children:[{name:"name",className:"pricing2-table-name",children:(0,e.jsx)("span",{children:(0,e.jsx)("span",{children:(0,e.jsx)("p",{children:"Radar"})})})}]}},{dataIndex:"pro",key:"pro",name:"pro",childWrapper:{className:"pricing2-table-name-block",children:[{name:"name",className:"pricing2-table-name",children:(0,e.jsx)("span",{children:(0,e.jsx)("p",{children:"LiDAR"})})}]}}]},dataSource:{children:[{name:"list0",children:[{className:"pricing2-table-content-name",name:"name",children:"Color, Texture, Shape"},{name:"content1",children:"images/start-fill.svg",className:"pricing2-table-content"},{children:"Unlimited",name:"content1",className:"pricing2-table-content"},{children:"Unlimited",name:"content2",className:"pricing2-table-content"},{children:"Unlimited",name:"content3",className:"pricing2-table-content"}]},{name:"list1",children:[{className:"pricing2-table-content-name",name:"name",children:"Range Measurement"},{children:"Limited",name:"content0",className:"pricing2-table-content"},{children:"Unlimited",name:"content1",className:"pricing2-table-content"},{children:"Unlimited",name:"content2",className:"pricing2-table-content"},{children:"Unlimited",name:"content3",className:"pricing2-table-content"}]},{name:"list2",children:[{className:"pricing2-table-content-name",name:"name",children:"Velocity Measurement"},{name:"content0",children:"50GB",className:"pricing2-table-content"},{name:"content1",children:"250GB",className:"pricing2-table-content"},{name:"content2",children:"600GB",className:"pricing2-table-content"},{name:"content3",children:"Unlimited",className:"pricing2-table-content"}]},{name:"list3",children:[{className:"pricing2-table-content-name",name:"name",children:"Lighting Robustness"},{children:"-",name:"content0",className:"pricing2-table-content"},{name:"content1",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"},{name:"content2",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"},{name:"content3",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"}]},{name:"list4",children:[{className:"pricing2-table-content-name",name:"name",children:"Weather Robustness"},{name:"content0",children:"-",className:"pricing2-table-content"},{name:"content1",children:"-",className:"pricing2-table-content"},{name:"content2",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"},{name:"content3",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"}]},{name:"list5",children:[{className:"pricing2-table-content-name",name:"name",children:"Classification 
Ability"},{name:"content0",children:"-",className:"pricing2-table-content"},{name:"content1",children:"-",className:"pricing2-table-content"},{name:"content2",children:"-",className:"pricing2-table-content"},{name:"content3",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"}]},{name:"list5",children:[{className:"pricing2-table-content-name",name:"name",children:"3D Perception"},{name:"content0",children:"-",className:"pricing2-table-content"},{name:"content1",children:"-",className:"pricing2-table-content"},{name:"content2",children:"-",className:"pricing2-table-content"},{name:"content3",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"}]},{name:"list5",children:[{className:"pricing2-table-content-name",name:"name",children:"System Cost"},{name:"content0",children:"-",className:"pricing2-table-content"},{name:"content1",children:"-",className:"pricing2-table-content"},{name:"content2",children:"-",className:"pricing2-table-content"},{name:"content3",children:"https://gw.alipayobjects.com/zos/basement_prod/14ce3060-34e6-4b30-9a45-1a6b95542310.svg",className:"pricing2-table-content"}]}]}}},J;(0,E.ac)(function(v){J=v});var Ke=typeof window!="undefined"?window:{},z=Ke.location,K=z===void 0?{}:z,Xe=function(v){S()(r,v);var m=C()(r);function r(b){var i;return y()(this,r),i=m.call(this,b),i.state={isMobile:J,show:!K.port},i}return R()(r,[{key:"componentDidMount",value:function(){var i=this;(0,E.ac)(function(u){i.setState({isMobile:!!u})}),K.port&&setTimeout(function(){i.setState({show:!0})},500)}},{key:"render",value:function(){var i=this,u=[(0,e.jsx)(oe,{id:"Nav0_0",dataSource:Be,isMobile:this.state.isMobile},"Nav0_0"),(0,e.jsx)(le,{id:"Banner3_0",dataSource:Ue,isMobile:this.state.isMobile},"Banner3_0"),(0,e.jsx)(Re,{id:"Dataset0_0",dataSource:H,isMobile:this.state.isMobile},"Dataset0_0"),(0,e.jsx)(Ce,{id:"Dataset0_0",dataSource:H,isMobile:this.state.isMobile},"Dataset0_0"),(0,e.jsx)(xe,{id:"Fusion0_0",dataSource:N,isMobile:this.state.isMobile},"Fusion0_0"),(0,e.jsx)(Ae,{id:"Fusion0_0",dataSource:N,isMobile:this.state.isMobile},"Fusion0_0"),(0,e.jsx)(Ie,{id:"Fusion0_0",dataSource:N,isMobile:this.state.isMobile},"Fusion0_0"),(0,e.jsx)(Pe,{id:"Fusion0_0",dataSource:N,isMobile:this.state.isMobile},"Fusion0_0"),(0,e.jsx)(Oe,{id:"Fusion0_0",dataSource:N,isMobile:this.state.isMobile},"Fusion0_0"),(0,e.jsx)(Fe,{id:"Fusion0_0",dataSource:ze,isMobile:this.state.isMobile},"Fusion0_0"),(0,e.jsx)(Ve,{id:"Content13_0",dataSource:He,isMobile:this.state.isMobile},"Content13_0"),(0,e.jsx)(Le,{id:"Footer0_0",dataSource:Je,isMobile:this.state.isMobile},"Footer0_0")];return(0,e.jsx)("div",{className:"templates-wrapper",ref:function(g){i.dom=g},children:this.state.show&&u})}}]),r}(k.Component)}}]);