# Awesome LIDAR [![Awesome](https://awesome.re/badge.svg)](https://awesome.re)

> A curated list of awesome LIDAR sensors and their applications.

[LIDAR](https://en.wikipedia.org/wiki/Lidar) is a remote sensing device that uses laser light to measure its surroundings with ~cm accuracy. The sensor output is usually referred to as a point cloud, i.e. a set of data points in 3D or 2D. The list contains hardware, datasets, point cloud processing algorithms, point cloud frameworks, simulators, etc.

Contributions are welcome! Please [check out](contributing.md) our guidelines.

## Contents

- [Awesome LIDAR](#awesome-lidar-)
  - [Contents](#contents)
  - [Conventions](#conventions)
  - [Manufacturers](#manufacturers)
  - [Datasets](#datasets)
  - [Libraries](#libraries)
  - [Frameworks](#frameworks)
  - [Algorithms](#algorithms)
    - [Basic matching algorithms](#basic-matching-algorithms)
    - [Semantic segmentation](#semantic-segmentation)
    - [Ground segmentation](#ground-segmentation)
    - [Simultaneous localization and mapping SLAM and LIDAR-based odometry and or mapping LOAM](#simultaneous-localization-and-mapping-slam-and-lidar-based-odometry-and-or-mapping-loam)
    - [Object detection and object tracking](#object-detection-and-object-tracking)
  - [Simulators](#simulators)
  - [Related awesome](#related-awesome)
  - [Others](#others)

## Conventions

- Any list item with an OctoCat :octocat: has a GitHub repo or organization
- Any list item with a RedCircle :red_circle: has YouTube videos or a channel
- Any list item with a Paper :newspaper: has a scientific paper or detailed description

## Manufacturers

- [Velodyne](https://velodynelidar.com/) - Ouster and Velodyne announced the successful completion of their merger of equals, effective February 10, 2023. Velodyne was a mechanical and solid-state LIDAR manufacturer headquartered in San Jose, California, USA.
  - [YouTube channel :red_circle:](https://www.youtube.com/user/VelodyneLiDAR)
  - [ROS driver :octocat:](https://github.com/ros-drivers/velodyne)
  - [C++/Python library :octocat:](https://github.com/valgur/velodyne_decoder)
- [Ouster](https://ouster.com/) - LIDAR manufacturer specializing in digital spinning LIDARs, headquartered in San Francisco, USA.
  - [YouTube channel :red_circle:](https://www.youtube.com/c/Ouster-lidar)
  - [GitHub organization :octocat:](https://github.com/ouster-lidar)
- [Livox](https://www.livoxtech.com/) - LIDAR manufacturer.
  - [YouTube channel :red_circle:](https://www.youtube.com/channel/UCnLpB5QxlQUexi40vM12mNQ)
  - [GitHub organization :octocat:](https://github.com/Livox-SDK)
- [SICK](https://www.sick.com/ag/en/) - Sensor and automation manufacturer headquartered in Waldkirch, Germany.
  - [YouTube channel :red_circle:](https://www.youtube.com/user/SICKSensors)
  - [GitHub organization :octocat:](https://github.com/SICKAG)
- [Hokuyo](https://www.hokuyo-aut.jp/) - Sensor and automation manufacturer headquartered in Osaka, Japan.
  - [YouTube channel :red_circle:](https://www.youtube.com/channel/UCYzJXC82IEy-h-io2REin5g)
- [Pioneer](http://autonomousdriving.pioneer/en/3d-lidar/) - LIDAR manufacturer specializing in MEMS mirror-based raster scanning LIDARs (3D-LiDAR), headquartered in Tokyo, Japan.
  - [YouTube channel :red_circle:](https://www.youtube.com/user/PioneerCorporationPR)
- [Luminar](https://www.luminartech.com/) - LIDAR manufacturer focusing on compact, automotive-grade sensors, headquartered in Palo Alto, California, USA.
  - [Vimeo channel :red_circle:](https://vimeo.com/luminartech)
  - [GitHub organization :octocat:](https://github.com/luminartech)
- [Hesai](https://www.hesaitech.com/) - Hesai Technology is a LIDAR manufacturer founded in Shanghai, China.
  - [YouTube channel :red_circle:](https://www.youtube.com/channel/UCG2_ffm6sdMsK-FX8yOLNYQ/videos)
  - [GitHub organization :octocat:](https://github.com/HesaiTechnology)
- [Robosense](http://www.robosense.ai/) - RoboSense (Suteng Innovation Technology Co., Ltd.) is a LIDAR sensor, AI algorithm and IC chipset manufacturer based in Shenzhen and Beijing, China.
  - [YouTube channel :red_circle:](https://www.youtube.com/channel/UCYCK8j678N6d_ayWE_8F3rQ)
  - [GitHub organization :octocat:](https://github.com/RoboSense-LiDAR)
- [LSLIDAR](https://www.lslidar.com/) - LSLiDAR (Leishen Intelligent System Co., Ltd.) is a LIDAR sensor manufacturer and complete solution provider based in Shenzhen, China.
  - [YouTube channel :red_circle:](https://www.youtube.com/@lslidar2015)
  - [GitHub organization :octocat:](https://github.com/Lslidar)
- [Ibeo](https://www.ibeo-as.com/) - Ibeo Automotive Systems GmbH is an automotive laser scanner / LIDAR manufacturer for environmental detection, based in Hamburg, Germany.
  - [YouTube channel :red_circle:](https://www.youtube.com/c/IbeoAutomotive/)
- [Innoviz](https://innoviz.tech/) - Innoviz Technologies specializes in solid-state LIDARs.
  - [YouTube channel :red_circle:](https://www.youtube.com/channel/UCVc1KFsu2eb20M8pKFwGiFQ)
- [Quanergy](https://quanergy.com/) - Quanergy Systems builds solid-state and mechanical LIDAR sensors and offers end-to-end solutions in mapping, industrial automation, transportation and security, headquartered in Sunnyvale, California, USA.
  - [YouTube channel :red_circle:](https://www.youtube.com/c/QuanergySystems)
- [Cepton](https://www.cepton.com/index.html) - Cepton (Cepton Technologies, Inc.) builds LIDARs with a frictionless, mirrorless design based on its self-developed MMT (micro motion technology), headquartered in San Jose, California, USA.
  - [YouTube channel :red_circle:](https://www.youtube.com/channel/UCUgkBZZ1UWWkkXJ5zD6o8QQ)
- [Blickfeld](https://www.blickfeld.com/) - Blickfeld is a solid-state LIDAR manufacturer for autonomous mobility and IoT, based in München, Germany.
  - [YouTube channel :red_circle:](https://www.youtube.com/c/BlickfeldLiDAR)
  - [GitHub organization :octocat:](https://github.com/Blickfeld)
- [Neuvition](https://www.neuvition.com/) - Neuvition is a solid-state LIDAR manufacturer based in Wujiang, China.
  - [YouTube channel :red_circle:](https://www.youtube.com/channel/UClFjlekWJo4T5bfzxX0ZW3A)
- [Aeva](https://www.aeva.com/) - Aeva is bringing the next wave of perception technology to all devices for automated driving, consumer electronics, health, industrial robotics and security. Aeva is headquartered in Mountain View, California, USA.
  - [YouTube channel :red_circle:](https://www.youtube.com/c/AevaInc)
  - [GitHub organization :octocat:](https://github.com/aevainc)
- [XenomatiX](https://www.xenomatix.com/) - XenomatiX offers true solid-state LIDAR sensors based on a multi-beam laser concept, headquartered in Leuven, Belgium.
  - [YouTube channel :red_circle:](https://www.youtube.com/@XenomatiXTruesolidstatelidar)
- [MicroVision](https://microvision.com/) - A pioneer in MEMS-based laser beam scanning technology, with a main focus on building automotive-grade LIDAR sensors, located in Hamburg, Germany.
  - [YouTube channel :red_circle:](https://www.youtube.com/user/mvisvideo)
  - [GitHub organization :octocat:](https://github.com/MicroVision-Inc)
- [PreAct](https://www.preact-tech.com/) - PreAct's mission is to make life safer and more efficient for the automotive industry and beyond, headquartered in Portland, Oregon, USA.
  - [YouTube channel :red_circle:](https://www.youtube.com/@PreActTechnologies)

## Datasets

- [Ford Dataset](https://avdata.ford.com/) - The dataset is time-stamped and contains raw data from all the sensors, calibration values, pose trajectory, ground truth pose, and 3D maps. The data is Robot Operating System (ROS) compatible.
  - [Paper :newspaper:](https://arxiv.org/pdf/2003.07969.pdf)
  - [GitHub repository :octocat:](https://github.com/Ford/AVData)
- [Audi A2D2 Dataset](https://www.a2d2.audi) - The dataset features 2D semantic segmentation, 3D point clouds, 3D bounding boxes, and vehicle bus data.
  - [Paper :newspaper:](https://www.a2d2.audi/content/dam/a2d2/dataset/a2d2-audi-autonomous-driving-dataset.pdf)
- [Waymo Open Dataset](https://waymo.com/open/) - The dataset contains independently-generated labels for lidar and camera data, not simply projections.
- [Oxford RobotCar](https://robotcar-dataset.robots.ox.ac.uk/) - The Oxford RobotCar Dataset contains over 100 repetitions of a consistent route through Oxford, UK, captured over a period of more than a year.
  - [YouTube channel :red_circle:](https://www.youtube.com/c/ORIOxfordRoboticsInstitute)
  - [Paper :newspaper:](https://robotcar-dataset.robots.ox.ac.uk/images/RCD_RTK.pdf)
- [EU Long-term Dataset](https://epan-utbm.github.io/utbm_robocar_dataset/) - This dataset was collected with a robocar (in human driving mode, of course) equipped with up to eleven heterogeneous sensors, in the downtown (for long-term data) and a suburb (for roundabout data) of Montbéliard, France. The vehicle speed was limited to 50 km/h following the French traffic rules.
- [NuScenes](https://www.nuscenes.org/) - Public large-scale dataset for autonomous driving.
  - [Paper :newspaper:](https://arxiv.org/pdf/1903.11027.pdf)
- [Lyft](https://level5.lyft.com/dataset/) - Public dataset collected by a fleet of Ford Fusion vehicles equipped with LIDAR and camera.
- [KITTI](http://www.cvlibs.net/datasets/kitti/raw_data.php) - Widespread public dataset, primarily focused on computer vision applications, but it also contains LIDAR point clouds (a minimal loading sketch follows this list).
- [Semantic KITTI](http://semantic-kitti.org/) - Dataset for semantic and panoptic scene segmentation.
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=3qNOXvkpK4I)
- [CADC - Canadian Adverse Driving Conditions Dataset](http://cadcd.uwaterloo.ca/) - Public large-scale dataset for autonomous driving in adverse weather conditions (snowy weather).
  - [Paper :newspaper:](https://arxiv.org/pdf/2001.10117.pdf)
- [UofTPed50 Dataset](https://www.autodrive.utoronto.ca/uoftped50) - University of Toronto aUToronto's self-driving car dataset, which contains GPS/IMU, 3D LIDAR, and monocular camera data. It can be used for 3D pedestrian detection.
  - [Paper :newspaper:](https://arxiv.org/pdf/1905.08758.pdf)
- [PandaSet Open Dataset](https://scale.com/open-datasets/pandaset) - Public large-scale dataset for autonomous driving provided by Hesai & Scale. It enables researchers to study challenging urban driving situations using the full sensor suite of a real self-driving car.
- [Cirrus dataset](https://developer.volvocars.com/open-datasets/cirrus/) - A public dataset with a non-uniform distribution of LIDAR scanning patterns and an emphasis on long range, recorded with the Luminar Hydra LIDAR. The dataset is available at the Volvo Cars Innovation Portal.
  - [Paper :newspaper:](https://arxiv.org/pdf/2012.02938.pdf)
- [USyd Dataset - The University of Sydney Campus Dataset](http://its.acfr.usyd.edu.au/datasets/usyd-campus-dataset/) - Long-term, large-scale dataset collected weekly over a period of 1.5 years around the University of Sydney campus and its surrounds. It includes multiple sensor modalities and covers various environmental conditions. ROS compatible.
  - [Paper :newspaper:](https://ieeexplore.ieee.org/document/9109704)
- [Brno Urban Dataset :octocat:](https://github.com/Robotics-BUT/Brno-Urban-Dataset) - Navigation and localisation dataset for self-driving cars and autonomous robots in Brno, Czechia.
  - [Paper :newspaper:](https://ieeexplore.ieee.org/document/9197277)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=wDFePIViwqY)
- [Argoverse :octocat:](https://www.argoverse.org/) - A dataset designed to support autonomous vehicle perception tasks, including 3D tracking and motion forecasting, collected in Pittsburgh, Pennsylvania and Miami, Florida, USA.
  - [Paper :newspaper:](https://openaccess.thecvf.com/content_CVPR_2019/papers/Chang_Argoverse_3D_Tracking_and_Forecasting_With_Rich_Maps_CVPR_2019_paper.pdf)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=DM8jWfi69zM)
- [Boreas Dataset](https://www.boreas.utias.utoronto.ca/) - The Boreas dataset was collected by driving a repeated route over the course of one year, resulting in stark seasonal variations. In total, Boreas contains over 350 km of driving data, including several sequences with adverse weather conditions such as rain and heavy snow. The Boreas data-taking platform features a unique high-quality sensor suite with a 128-channel Velodyne Alpha Prime lidar, a 360-degree Navtech radar, and accurate ground truth poses obtained from an Applanix POSLV GPS/IMU.
  - [Paper :newspaper:](https://arxiv.org/abs/2203.10168)
  - [GitHub repository :octocat:](https://github.com/utiasASRL/pyboreas)

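Most of the datasets above are distributed either as ROS bags or as simple per-scan binary/PCD files, so very little code is needed to start experimenting. As an illustration, here is a minimal sketch (Python + NumPy) for reading a raw KITTI Velodyne scan; KITTI documents each `.bin` scan as consecutive little-endian float32 values (x, y, z, reflectance), and the file path used here is only a placeholder.

```python
# Minimal sketch: read one KITTI Velodyne scan (.bin) into a NumPy array.
# KITTI stores each scan as consecutive float32 values: x, y, z, reflectance.
# The path below is only an example; point it at your local dataset copy.
import numpy as np

def load_kitti_scan(path: str) -> np.ndarray:
    """Return an (N, 4) array with columns x, y, z, reflectance."""
    scan = np.fromfile(path, dtype=np.float32)
    return scan.reshape(-1, 4)

if __name__ == "__main__":
    points = load_kitti_scan("2011_09_26/velodyne_points/data/0000000000.bin")
    print(points.shape)      # e.g. (~120000, 4) for a 64-beam scan
    print(points[:3])        # first three points
```

The resulting `(N, 4)` array can be handed directly to any of the libraries in the next section.
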
## Libraries

- [Point Cloud Library (PCL)](http://www.pointclouds.org/) - Popular, highly parallel programming library with numerous industrial and research use cases.
  - [GitHub repository :octocat:](https://github.com/PointCloudLibrary/pcl)
- [Open3D library](http://www.open3d.org/docs/release/) - Open3D contains 3D data processing and visualization algorithms. It is open source and supports both C++ and Python (see the sketch after this list).
  - [GitHub repository :octocat:](https://github.com/intel-isl/Open3D)
  - [YouTube channel :red_circle:](https://www.youtube.com/channel/UCRJBlASPfPBtPXJSPffJV-w)
- [PyTorch Geometric :newspaper:](https://arxiv.org/pdf/1903.02428.pdf) - A geometric deep learning extension library for PyTorch.
  - [GitHub repository :octocat:](https://github.com/rusty1s/pytorch_geometric)
- [PyTorch3d](https://pytorch3d.org/) - PyTorch3d is a library for deep learning with 3D data, written and maintained by the Facebook AI Research Computer Vision Team.
  - [GitHub repository :octocat:](https://github.com/facebookresearch/pytorch3d)
- [Kaolin](https://kaolin.readthedocs.io/en/latest/) - Kaolin is a PyTorch library for accelerating 3D deep learning research, written by NVIDIA Technologies for game and application developers.
  - [GitHub repository :octocat:](https://github.com/NVIDIAGameWorks/kaolin/)
  - [Paper :newspaper:](https://arxiv.org/pdf/1911.05063.pdf)
- [PyVista](https://docs.pyvista.org/) - 3D plotting and mesh analysis through a streamlined interface for the Visualization Toolkit (VTK).
  - [GitHub repository :octocat:](https://github.com/pyvista/pyvista)
  - [Paper :newspaper:](https://joss.theoj.org/papers/10.21105/joss.01450)
- [pyntcloud](https://pyntcloud.readthedocs.io/en/latest/) - Pyntcloud is a Python 3 library for working with 3D point clouds, leveraging the power of the Python scientific stack.
  - [GitHub repository :octocat:](https://github.com/daavoo/pyntcloud)
- [pointcloudset](https://virtual-vehicle.github.io/pointcloudset/) - Python library for efficient analysis of large datasets of point clouds recorded over time.
  - [GitHub repository :octocat:](https://github.com/virtual-vehicle/pointcloudset)

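To give a feel for how these libraries are used in practice, here is a minimal sketch with Open3D (chosen only as an example from the list above) that loads a point cloud, voxel-downsamples it and displays it; the file name `scan.pcd` and the 10 cm voxel size are placeholder assumptions.

```python
# Minimal Open3D sketch: load, downsample and display a point cloud.
# "scan.pcd" is a placeholder path; replace it with your own PCD/PLY file.
import open3d as o3d

pcd = o3d.io.read_point_cloud("scan.pcd")   # supports PCD, PLY, XYZ, ...
print(pcd)                                  # e.g. "PointCloud with N points"

# Voxel downsampling keeps one point per 10 cm voxel to speed up later steps.
down = pcd.voxel_down_sample(voxel_size=0.1)

# Estimate normals (useful for registration and meshing) and show the result.
down.estimate_normals(
    search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.3, max_nn=30))
o3d.visualization.draw_geometries([down])
```
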
## Frameworks

- [Autoware](https://www.autoware.ai/) - Popular framework in academic and research applications of autonomous vehicles.
  - [GitLab repository :octocat:](https://gitlab.com/autowarefoundation/autoware.ai)
  - [Paper :newspaper:](https://www.researchgate.net/profile/Takuya_Azumi/publication/327198306_Autoware_on_Board_Enabling_Autonomous_Vehicles_with_Embedded_Systems/links/5c9085da45851564fae6dcd0/Autoware-on-Board-Enabling-Autonomous-Vehicles-with-Embedded-Systems.pdf)
- [Baidu Apollo](https://apollo.auto/) - Apollo is a popular framework which accelerates the development, testing, and deployment of autonomous vehicles.
  - [GitHub repository :octocat:](https://github.com/ApolloAuto/apollo)
  - [YouTube channel :red_circle:](https://www.youtube.com/c/ApolloAuto)

## Algorithms

### Basic matching algorithms

- [Iterative closest point (ICP) :red_circle:](https://www.youtube.com/watch?v=uzOCS_gdZuM) - The must-have algorithm for feature matching applications; a minimal registration sketch follows this list.
  - [GitHub repository :octocat:](https://github.com/pglira/simpleICP) - simpleICP, with C++ / Julia / MATLAB / Octave / Python implementations.
  - [GitHub repository :octocat:](https://github.com/ethz-asl/libpointmatcher) - libpointmatcher, a modular library implementing the ICP algorithm.
  - [Paper :newspaper:](https://link.springer.com/content/pdf/10.1007/s10514-013-9327-2.pdf) - libpointmatcher: Comparing ICP variants on real-world data sets.
- [Normal distributions transform :red_circle:](https://www.youtube.com/watch?v=0YV4a2asb8Y) - More recent, massively-parallel approach to feature matching (NDT).
- [KISS-ICP :red_circle:](https://www.youtube.com/watch?v=kMMH8rA1ggI) - In Defense of Point-to-Point ICP – Simple, Accurate, and Robust Registration If Done the Right Way.
  - [GitHub repository :octocat:](https://github.com/PRBonn/kiss-icp)
  - [Paper :newspaper:](https://arxiv.org/pdf/2209.15397.pdf)

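If you want to try scan matching yourself, the sketch below runs a plain point-to-point ICP registration using Open3D's registration pipeline rather than any of the specific implementations listed above; the input files, the 1.0 m correspondence threshold and the identity initial guess are placeholder assumptions.

```python
# Minimal point-to-point ICP sketch with Open3D (illustrative only).
# "source.pcd" / "target.pcd" are placeholder files; the threshold and the
# identity initial guess are assumptions you would tune for real data.
import numpy as np
import open3d as o3d

source = o3d.io.read_point_cloud("source.pcd")
target = o3d.io.read_point_cloud("target.pcd")

threshold = 1.0        # max correspondence distance in meters
init = np.eye(4)       # initial alignment guess (identity)

result = o3d.pipelines.registration.registration_icp(
    source, target, threshold, init,
    o3d.pipelines.registration.TransformationEstimationPointToPoint())

print("fitness:", result.fitness, "inlier RMSE:", result.inlier_rmse)
print("estimated 4x4 transform:")
print(result.transformation)
```
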
### Semantic segmentation

- [RangeNet++ :newspaper:](https://www.ipb.uni-bonn.de/wp-content/papercite-data/pdf/milioto2019iros.pdf) - Fast and accurate LiDAR semantic segmentation with a fully convolutional network.
  - [GitHub repository :octocat:](https://github.com/PRBonn/rangenet_lib)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=uo3ZuLuFAzk)
- [PolarNet :newspaper:](https://arxiv.org/pdf/2003.14032.pdf) - An Improved Grid Representation for Online LiDAR Point Clouds Semantic Segmentation.
  - [GitHub repository :octocat:](https://github.com/edwardzhou130/PolarSeg)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=iIhttRSMqjE)
- [Frustum PointNets :newspaper:](https://arxiv.org/pdf/1711.08488.pdf) - Frustum PointNets for 3D Object Detection from RGB-D Data.
  - [GitHub repository :octocat:](https://github.com/charlesq34/frustum-pointnets)
- [Study of LIDAR Semantic Segmentation](https://larissa.triess.eu/scan-semseg/) - Scan-based Semantic Segmentation of LiDAR Point Clouds: An Experimental Study, IV 2020.
  - [Paper :newspaper:](https://arxiv.org/abs/2004.11803)
  - [GitHub repository :octocat:](http://ltriess.github.io/scan-semseg)
- [LIDAR-MOS :newspaper:](https://www.ipb.uni-bonn.de/pdfs/chen2021ral-iros.pdf) - Moving Object Segmentation in 3D LIDAR Data.
  - [GitHub repository :octocat:](https://github.com/PRBonn/LiDAR-MOS)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=NHvsYhk4dhw)
- [SuperPoint Graph :newspaper:](https://arxiv.org/pdf/1711.09869.pdf) - Large-scale Point Cloud Semantic Segmentation with Superpoint Graphs.
  - [GitHub repository :octocat:](https://github.com/PRBonn/LiDAR-MOS)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=Ijr3kGSU_tU)
- [RandLA-Net :newspaper:](https://arxiv.org/pdf/1911.11236.pdf) - Efficient Semantic Segmentation of Large-Scale Point Clouds.
  - [GitHub repository :octocat:](https://github.com/QingyongHu/RandLA-Net)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=Ar3eY_lwzMk)
- [Automatic labelling :newspaper:](https://arxiv.org/pdf/2108.13757.pdf) - Automatic labelling of urban point clouds using data fusion.
  - [GitHub repository :octocat:](https://github.com/Amsterdam-AI-Team/Urban_PointCloud_Processing)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=qMj_WM6D0vI)

### Ground segmentation

- [Plane Seg :octocat:](https://github.com/ori-drs/plane_seg) - ROS compatible ground plane segmentation; a library for fitting planes to LIDAR data (a minimal RANSAC plane-fitting sketch follows this list).
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=YYs4lJ9t-Xo)
- [LineFit :newspaper:](https://ieeexplore.ieee.org/abstract/document/5548059) - Line fitting-based fast ground segmentation for horizontal 3D LIDAR data.
  - [GitHub repository :octocat:](https://github.com/lorenwel/linefit_ground_segmentation)
- [Patchwork :newspaper:](https://arxiv.org/pdf/2108.05560.pdf) - Region-wise plane fitting-based robust and fast ground segmentation for 3D LIDAR data.
  - [GitHub repository :octocat:](https://github.com/LimHyungTae/patchwork)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=rclqeDi4gow)
- [Patchwork++ :newspaper:](https://arxiv.org/pdf/2207.11919.pdf) - Improved version of Patchwork. Patchwork++ also provides Python bindings for deep learning users.
  - [GitHub repository :octocat:](https://github.com/url-kaist/patchwork-plusplus-ros)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=fogCM159GRk)

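As a rough illustration of the idea behind these methods, the hedged sketch below removes a single ground plane with Open3D's built-in RANSAC plane fit; the input file and the 0.2 m inlier threshold are placeholders, and a single global plane is a simplification that the listed methods (e.g. Patchwork) are designed to overcome.

```python
# Naive ground removal: fit one plane with RANSAC and split inliers/outliers.
# "scan.pcd" and the thresholds are placeholders; real ground segmentation
# (e.g. Patchwork) handles slopes and uneven terrain far more robustly.
import open3d as o3d

pcd = o3d.io.read_point_cloud("scan.pcd")

# segment_plane returns the plane coefficients (a, b, c, d) of ax+by+cz+d=0
# and the indices of the points within distance_threshold of that plane.
plane, inliers = pcd.segment_plane(distance_threshold=0.2,
                                   ransac_n=3,
                                   num_iterations=1000)
print("plane model:", plane)

ground = pcd.select_by_index(inliers)                  # likely ground points
obstacles = pcd.select_by_index(inliers, invert=True)  # everything else
o3d.visualization.draw_geometries([obstacles])
```
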
### Simultaneous localization and mapping SLAM and LIDAR-based odometry and or mapping LOAM

- [LOAM J. Zhang and S. Singh :red_circle:](https://youtu.be/8ezyhTAEyHs) - LOAM: Lidar Odometry and Mapping in Real-time.
- [LeGO-LOAM :octocat:](https://github.com/RobustFieldAutonomyLab/LeGO-LOAM) - A lightweight and ground-optimized lidar odometry and mapping (LeGO-LOAM) system for ROS compatible UGVs.
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=7uCxLUs9fwQ)
- [Cartographer :octocat:](https://github.com/cartographer-project/cartographer) - Cartographer is a ROS compatible system that provides real-time simultaneous localization and mapping (SLAM) in 2D and 3D across multiple platforms and sensor configurations.
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=29Knm-phAyI)
- [SuMa++ :newspaper:](http://www.ipb.uni-bonn.de/wp-content/papercite-data/pdf/chen2019iros.pdf) - LiDAR-based Semantic SLAM.
  - [GitHub repository :octocat:](https://github.com/PRBonn/semantic_suma/)
  - [YouTube video :red_circle:](https://youtu.be/uo3ZuLuFAzk)
- [OverlapNet :newspaper:](http://www.ipb.uni-bonn.de/wp-content/papercite-data/pdf/chen2020rss.pdf) - Loop Closing for LiDAR-based SLAM.
  - [GitHub repository :octocat:](https://github.com/PRBonn/OverlapNet)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=YTfliBco6aw)
- [LIO-SAM :newspaper:](https://arxiv.org/pdf/2007.00258.pdf) - Tightly-coupled Lidar Inertial Odometry via Smoothing and Mapping.
  - [GitHub repository :octocat:](https://github.com/TixiaoShan/LIO-SAM)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=A0H8CoORZJU)
- [Removert :newspaper:](http://ras.papercept.net/images/temp/IROS/files/0855.pdf) - Remove, then Revert: Static Point Cloud Map Construction Using Multiresolution Range Images.
  - [GitHub repository :octocat:](https://github.com/irapkaist/removert)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=M9PEGi5fAq8)

### Object detection and object tracking

- [Learning to Optimally Segment Point Clouds :newspaper:](https://arxiv.org/abs/1912.04976) - By Peiyun Hu, David Held, and Deva Ramanan at Carnegie Mellon University. IEEE Robotics and Automation Letters, 2020.
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=wLxIAwIL870)
  - [GitHub repository :octocat:](https://github.com/peiyunh/opcseg)
- [Leveraging Heteroscedastic Aleatoric Uncertainties for Robust Real-Time LiDAR 3D Object Detection :newspaper:](https://arxiv.org/pdf/1809.05590.pdf) - By Di Feng, Lars Rosenbaum, Fabian Timm, Klaus Dietmayer. 30th IEEE Intelligent Vehicles Symposium, 2019.
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=2DzH9COLpkU)
- [What You See is What You Get: Exploiting Visibility for 3D Object Detection :newspaper:](https://arxiv.org/pdf/1912.04986.pdf) - By Peiyun Hu, Jason Ziglar, David Held, Deva Ramanan, 2019.
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=497OF-otY2k)
  - [GitHub repository :octocat:](https://github.com/peiyunh/WYSIWYG)
- [urban_road_filter :newspaper:](https://doi.org/10.3390/s22010194) - Real-Time LIDAR-Based Urban Road and Sidewalk Detection for Autonomous Vehicles.
  - [GitHub repository :octocat:](https://github.com/jkk-research/urban_road_filter)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=T2qi4pldR-E)

## Simulators

- [CoppeliaSim](https://www.coppeliarobotics.com/coppeliaSim) - Cross-platform, general-purpose robotic simulator (formerly known as V-REP).
  - [YouTube channel :red_circle:](https://www.youtube.com/user/VirtualRobotPlatform)
- [OSRF Gazebo](http://gazebosim.org/) - OGRE-based general-purpose robotic simulator, ROS/ROS 2 compatible.
  - [GitHub repository :octocat:](https://github.com/osrf/gazebo)
- [CARLA](https://carla.org/) - Unreal Engine based simulator for automotive applications. Compatible with Autoware, Baidu Apollo and ROS/ROS 2.
  - [GitHub repository :octocat:](https://github.com/carla-simulator/carla)
  - [YouTube channel :red_circle:](https://www.youtube.com/channel/UC1llP9ekCwt8nEJzMJBQekg)
- [LGSVL / SVL](https://www.lgsvlsimulator.com/) - Unity Engine based simulator for automotive applications. Compatible with Autoware, Baidu Apollo and ROS/ROS 2. Note: LG has made the difficult decision to [suspend](https://www.svlsimulator.com/news/2022-01-20-svl-simulator-sunset) active development of SVL Simulator.
  - [GitHub repository :octocat:](https://github.com/lgsvl/simulator)
  - [YouTube channel :red_circle:](https://www.youtube.com/c/LGSVLSimulator)
- [OSSDC SIM](https://github.com/OSSDC/OSSDC-SIM) - Unity Engine based simulator for automotive applications, based on the suspended LGSVL simulator but under active development. Compatible with Autoware, Baidu Apollo and ROS/ROS 2.
  - [GitHub repository :octocat:](https://github.com/OSSDC/OSSDC-SIM)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=fU_C38WEwGw)
- [AirSim](https://microsoft.github.io/AirSim) - Unreal Engine based simulator for drones and automotive. Compatible with ROS.
  - [GitHub repository :octocat:](https://github.com/microsoft/AirSim)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=gnz1X3UNM5Y)
- [AWSIM](https://tier4.github.io/AWSIM) - Unity Engine based simulator for automotive applications. Compatible with Autoware and ROS 2.
  - [GitHub repository :octocat:](https://github.com/tier4/AWSIM)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=FH7aBWDmSNA)

## Related awesome

- [Awesome point cloud analysis :octocat:](https://github.com/Yochengliu/awesome-point-cloud-analysis#readme)
- [Awesome robotics :octocat:](https://github.com/Kiloreux/awesome-robotics#readme)
- [Awesome robotics libraries :octocat:](https://github.com/jslee02/awesome-robotics-libraries#readme)
- [Awesome ROS 2 :octocat:](https://github.com/fkromer/awesome-ros2#readme)
- [Awesome artificial intelligence :octocat:](https://github.com/owainlewis/awesome-artificial-intelligence#readme)
- [Awesome computer vision :octocat:](https://github.com/jbhuang0604/awesome-computer-vision#readme)
- [Awesome machine learning :octocat:](https://github.com/josephmisiti/awesome-machine-learning#readme)
- [Awesome deep learning :octocat:](https://github.com/ChristosChristofidis/awesome-deep-learning#readme)
- [Awesome reinforcement learning :octocat:](https://github.com/aikorea/awesome-rl/#readme)
- [Awesome SLAM datasets :octocat:](https://github.com/youngguncho/awesome-slam-datasets#readme)
- [Awesome electronics :octocat:](https://github.com/kitspace/awesome-electronics#readme)
- [Awesome vehicle security and car hacking :octocat:](https://github.com/jaredthecoder/awesome-vehicle-security#readme)
- [Awesome LIDAR-Camera calibration :octocat:](https://github.com/Deephome/Awesome-LiDAR-Camera-Calibration)

## Others

- [ARHeadsetKit](https://github.com/philipturner/ARHeadsetKit) - Using $5 Google Cardboard to replicate Microsoft HoloLens. Hosts the source code for research on [scene color reconstruction](https://github.com/philipturner/scene-color-reconstruction).
- [Pointcloudprinter :octocat:](https://github.com/marian42/pointcloudprinter) - A tool to turn point cloud data from aerial lidar scans into solid meshes for 3D printing.
- [CloudCompare](https://cloudcompare.org/) - CloudCompare is a free, cross-platform point cloud editor.
  - [GitHub repository :octocat:](https://github.com/CloudCompare)
- [Pcx :octocat:](https://github.com/keijiro/Pcx) - Point cloud importer/renderer for Unity.
- [Bpy :octocat:](https://github.com/uhlik/bpy) - Point cloud importer/renderer/editor for Blender, Point Cloud Visualizer.
- [Semantic Segmentation Editor :octocat:](https://github.com/Hitachi-Automotive-And-Industry-Lab/semantic-segmentation-editor) - Point cloud and image semantic segmentation editor by Hitachi Automotive And Industry Laboratory; point cloud annotation / labeling.
- [3D Bounding Box Annotation Tool :octocat:](https://github.com/walzimmer/3d-bat) - 3D BAT: A Semi-Automatic, Web-based 3D Annotation Toolbox for Full-Surround, Multi-Modal Data Streams; point cloud annotation / labeling.
  - [Paper :newspaper:](https://arxiv.org/pdf/1905.00525.pdf)
  - [YouTube video :red_circle:](https://www.youtube.com/watch?v=gSGG4Lw8BSU)
- [Photogrammetry importer :octocat:](https://github.com/SBCV/Blender-Addon-Photogrammetry-Importer) - Blender addon to import reconstruction results of several libraries.
- [Foxglove](https://foxglove.dev/) - Foxglove Studio is an integrated visualization and diagnosis tool for robotics, available in your browser or as a downloadable desktop app on Linux, Windows, and macOS.
  - [GitHub repository :octocat:](https://github.com/foxglove/studio)
  - [YouTube channel :red_circle:](https://www.youtube.com/channel/UCrIbrBxb9HBAnlhbx2QycsA)
- [MeshLab](https://www.meshlab.net/) - MeshLab is an open source, portable, and extensible system for processing and editing 3D triangular meshes and point clouds.
  - [GitHub repository :octocat:](https://github.com/cnr-isti-vclab/meshlab)