From 2aff9b4dbe57b41fababcd1a5463634d87137c1f Mon Sep 17 00:00:00 2001
From: Yutian Chen
Date: Mon, 23 Sep 2024 20:00:51 -0400
Subject: [PATCH] Update

---
 docs/404.html                                              | 2 +-
 .../content/ConfigSpec.json                                | 0
 .../content/CustomizeConfig.json                           | 0
 .../content/CustomizeDataLoader.json                       | 0
 .../content/CustomizeOptimizer.json                        | 0
 .../content/Docs.json                                      | 0
 .../_buildManifest.js                                      | 0
 .../_ssgManifest.js                                        | 0
 .../{page-25ecf2883ddac5c9.js => page-e72dbb63cd300d60.js} | 2 +-
 docs/content/ConfigSpec.html                               | 4 ++--
 docs/content/CustomizeConfig.html                          | 4 ++--
 docs/content/CustomizeDataLoader.html                      | 4 ++--
 docs/content/CustomizeOptimizer.html                       | 4 ++--
 docs/content/Docs.html                                     | 4 ++--
 docs/index.html                                            | 2 +-
 docs/index.txt                                             | 4 ++--
 src/app/page.tsx                                           | 7 ++++---
 17 files changed, 19 insertions(+), 18 deletions(-)
 rename docs/_next/data/{sKG2otVmT-JSWW4yqfmjk => M8bGBKrGrPOhF9_vcPPeH}/content/ConfigSpec.json (100%)
 rename docs/_next/data/{sKG2otVmT-JSWW4yqfmjk => M8bGBKrGrPOhF9_vcPPeH}/content/CustomizeConfig.json (100%)
 rename docs/_next/data/{sKG2otVmT-JSWW4yqfmjk => M8bGBKrGrPOhF9_vcPPeH}/content/CustomizeDataLoader.json (100%)
 rename docs/_next/data/{sKG2otVmT-JSWW4yqfmjk => M8bGBKrGrPOhF9_vcPPeH}/content/CustomizeOptimizer.json (100%)
 rename docs/_next/data/{sKG2otVmT-JSWW4yqfmjk => M8bGBKrGrPOhF9_vcPPeH}/content/Docs.json (100%)
 rename docs/_next/static/{sKG2otVmT-JSWW4yqfmjk => M8bGBKrGrPOhF9_vcPPeH}/_buildManifest.js (100%)
 rename docs/_next/static/{sKG2otVmT-JSWW4yqfmjk => M8bGBKrGrPOhF9_vcPPeH}/_ssgManifest.js (100%)
 rename docs/_next/static/chunks/app/{page-25ecf2883ddac5c9.js => page-e72dbb63cd300d60.js} (68%)

diff --git a/docs/404.html b/docs/404.html
index 1cc9d68..013862c 100644
--- a/docs/404.html
+++ b/docs/404.html
@@ -1 +1 @@
-Not Found | MAC-VO: Metric-Aware Covariance for Learning-based Stereo Visual Odometry

Page Not Found

Back to home
\ No newline at end of file
+Not Found | MAC-VO: Metric-Aware Covariance for Learning-based Stereo Visual Odometry

Page Not Found

Back to home
\ No newline at end of file diff --git a/docs/_next/data/sKG2otVmT-JSWW4yqfmjk/content/ConfigSpec.json b/docs/_next/data/M8bGBKrGrPOhF9_vcPPeH/content/ConfigSpec.json similarity index 100% rename from docs/_next/data/sKG2otVmT-JSWW4yqfmjk/content/ConfigSpec.json rename to docs/_next/data/M8bGBKrGrPOhF9_vcPPeH/content/ConfigSpec.json diff --git a/docs/_next/data/sKG2otVmT-JSWW4yqfmjk/content/CustomizeConfig.json b/docs/_next/data/M8bGBKrGrPOhF9_vcPPeH/content/CustomizeConfig.json similarity index 100% rename from docs/_next/data/sKG2otVmT-JSWW4yqfmjk/content/CustomizeConfig.json rename to docs/_next/data/M8bGBKrGrPOhF9_vcPPeH/content/CustomizeConfig.json diff --git a/docs/_next/data/sKG2otVmT-JSWW4yqfmjk/content/CustomizeDataLoader.json b/docs/_next/data/M8bGBKrGrPOhF9_vcPPeH/content/CustomizeDataLoader.json similarity index 100% rename from docs/_next/data/sKG2otVmT-JSWW4yqfmjk/content/CustomizeDataLoader.json rename to docs/_next/data/M8bGBKrGrPOhF9_vcPPeH/content/CustomizeDataLoader.json diff --git a/docs/_next/data/sKG2otVmT-JSWW4yqfmjk/content/CustomizeOptimizer.json b/docs/_next/data/M8bGBKrGrPOhF9_vcPPeH/content/CustomizeOptimizer.json similarity index 100% rename from docs/_next/data/sKG2otVmT-JSWW4yqfmjk/content/CustomizeOptimizer.json rename to docs/_next/data/M8bGBKrGrPOhF9_vcPPeH/content/CustomizeOptimizer.json diff --git a/docs/_next/data/sKG2otVmT-JSWW4yqfmjk/content/Docs.json b/docs/_next/data/M8bGBKrGrPOhF9_vcPPeH/content/Docs.json similarity index 100% rename from docs/_next/data/sKG2otVmT-JSWW4yqfmjk/content/Docs.json rename to docs/_next/data/M8bGBKrGrPOhF9_vcPPeH/content/Docs.json diff --git a/docs/_next/static/sKG2otVmT-JSWW4yqfmjk/_buildManifest.js b/docs/_next/static/M8bGBKrGrPOhF9_vcPPeH/_buildManifest.js similarity index 100% rename from docs/_next/static/sKG2otVmT-JSWW4yqfmjk/_buildManifest.js rename to docs/_next/static/M8bGBKrGrPOhF9_vcPPeH/_buildManifest.js diff --git a/docs/_next/static/sKG2otVmT-JSWW4yqfmjk/_ssgManifest.js b/docs/_next/static/M8bGBKrGrPOhF9_vcPPeH/_ssgManifest.js similarity index 100% rename from docs/_next/static/sKG2otVmT-JSWW4yqfmjk/_ssgManifest.js rename to docs/_next/static/M8bGBKrGrPOhF9_vcPPeH/_ssgManifest.js diff --git a/docs/_next/static/chunks/app/page-25ecf2883ddac5c9.js b/docs/_next/static/chunks/app/page-e72dbb63cd300d60.js similarity index 68% rename from docs/_next/static/chunks/app/page-25ecf2883ddac5c9.js rename to docs/_next/static/chunks/app/page-e72dbb63cd300d60.js index db0dc57..b01ce19 100644 --- a/docs/_next/static/chunks/app/page-25ecf2883ddac5c9.js +++ b/docs/_next/static/chunks/app/page-e72dbb63cd300d60.js @@ -1 +1 @@ -(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[931],{310:function(e,t,a){Promise.resolve().then(a.bind(a,7386))},7386:function(e,t,a){"use strict";a.r(t),a.d(t,{default:function(){return V}});var r=a(9390),s=a(7281),i=a(4243),n=a(9132),l=a(965);n.z.object({NEXT_PUBLIC_SHOW_LOGGER:n.z.enum(["true","false"]).optional()}).parse(l.env);var o=function(){let[e,t]=(0,i.useState)("light");return(0,i.useEffect)(()=>{var e;let a=(e="darkMode",window.localStorage.getItem(e));null===a?t("dark"):t(a)},[]),(0,i.useEffect)(()=>{var t;t="darkMode",window.localStorage.setItem(t,e)},[e]),[e,()=>{t(e=>"light"===e?"dark":"light")}]},c=a(4395),d=function(e){let{state:t,switch_state:a,size:i="normal"}=e;return(0,r.jsx)(c.rs,{checked:t,onChange:a,className:(0,s.Z)("group inline-flex items-center rounded-full bg-gray-600 transition data-[checked]:bg-primary-500","normal"===i?"h-6 w-11":"h-4 
w-9"),children:(0,r.jsx)("span",{className:(0,s.Z)("normal"===i?"size-4 group-data-[checked]:translate-x-6":"size-3 group-data-[checked]:translate-x-5","translate-x-1 rounded-full bg-white transition")})})},m=e=>{let{img_src:t,caption:a,isDark:n,idx:l}=e,o=n?"text-gray-400":"text-gray-600",[c,m]=(0,i.useState)(n?"invert":"invert-0");return(0,i.useEffect)(()=>{m(n?"invert":"invert-0")},[n]),(0,r.jsxs)("figure",{className:"flex flex-col items-center justify-center",children:[(0,r.jsx)("img",{src:t,className:(0,s.Z)("w-full h-auto rounded-md p-2 transition",c,"invert"===c?"bg-gray-200":"")}),n?(0,r.jsxs)("div",{className:(0,s.Z)(o,"text-sm","invert"===c?"text-primary-500":""),children:[(0,r.jsxs)("span",{children:["Color Inversion ","invert"===c?"ON":"OFF"," "]}),(0,r.jsx)(d,{state:"invert"===c,switch_state:()=>m("invert"===c?"invert-0":"invert"),size:"small"})]}):null,(0,r.jsxs)("figcaption",{className:(0,s.Z)(o,"mt-2","font-light"),children:["Figure ",l,". ",a]})]})},h=a(7894),u=a.n(h);function x(e){let{text:t,...a}=e,s=(0,i.useRef)();return(0,i.useEffect)(()=>{s.current&&u()(s.current,{delimiters:[{left:"$$",right:"$$",display:!0},{left:"$",right:"$",display:!1}]})},[t]),(0,r.jsx)("span",{ref:s,...a,children:t})}a(7297);var p=a(7112),g=a(4414);let f=i.forwardRef((e,t)=>{let{children:a,href:s,openNewTab:i,className:n,nextLinkProps:l,...o}=e;return(void 0!==i?i:s&&!s.startsWith("/")&&!s.startsWith("#"))?(0,r.jsx)("a",{ref:t,target:"_blank",rel:"noopener noreferrer",href:s,...o,className:(0,p.cn)("cursor-newtab",n),children:a}):(0,r.jsx)(g.default,{href:s,ref:t,className:n,...o,...l,children:a})}),b=i.forwardRef((e,t)=>{let{children:a,className:s,...i}=e;return(0,r.jsx)(f,{ref:t,...i,className:(0,p.cn)("animated-underline custom-link inline-flex items-center font-medium","focus-visible:ring-primary-500 focus:outline-none focus-visible:rounded focus-visible:ring focus-visible:ring-offset-2","border-dark border-b border-dotted hover:border-black/0",s),children:a})});function v(e){let{children:t,className:a,direction:s="right",as:i,icon:n=null,...l}=e;return(0,r.jsxs)(i||b,{...l,className:(0,p.cn)("group gap-[0.25em]","left"===s&&"flex-row-reverse",a),children:[n,(0,r.jsx)("span",{children:t}),(0,r.jsxs)("svg",{viewBox:"0 0 16 16",height:"1em",width:"1em",fill:"none",xmlns:"http://www.w3.org/2000/svg",className:(0,p.cn)("relative","transition-transform duration-200","right"===s?"motion-safe:-translate-x-1":"rotate-180","group-hover:translate-x-0"),children:[(0,r.jsx)("path",{fill:"currentColor",d:"M7.28033 3.21967C6.98744 2.92678 6.51256 2.92678 6.21967 3.21967C5.92678 3.51256 5.92678 3.98744 6.21967 4.28033L7.28033 3.21967ZM11 8L11.5303 8.53033C11.8232 8.23744 11.8232 7.76256 11.5303 7.46967L11 8ZM6.21967 11.7197C5.92678 12.0126 5.92678 12.4874 6.21967 12.7803C6.51256 13.0732 6.98744 13.0732 7.28033 12.7803L6.21967 11.7197ZM6.21967 4.28033L10.4697 8.53033L11.5303 7.46967L7.28033 3.21967L6.21967 4.28033ZM10.4697 7.46967L6.21967 11.7197L7.28033 12.7803L11.5303 8.53033L10.4697 7.46967Z"}),(0,r.jsx)("path",{stroke:"currentColor",d:"M1.75 8H11",strokeWidth:"1.5",strokeLinecap:"round",className:(0,p.cn)("origin-left transition-all duration-200","opacity-0 motion-safe:-translate-x-1","group-hover:translate-x-0 group-hover:opacity-100")})]})]})}var y=a(3068),j=a(8797),w=a(9723),N=a(2725);let k=e=>(0,r.jsx)(i.Fragment,{children:e.children});var 
C=(0,N.default)(()=>Promise.resolve(k),{ssr:!1}),_=e=>{let{children:t,fallback:a}=e,r=window.innerWidth,[s,n]=(0,i.useState)(r<=800);return(0,i.useEffect)(()=>{let e=()=>n(window.innerWidth<=800);return window.addEventListener("resize",e),()=>window.removeEventListener("resize",e)},[]),s?a:t},z=a(7155),M=a(1318);function Z(e){let{onClose:t,title:a,children:s=null,subtitle:i=null,download:n=null}=e;return(0,r.jsxs)(z.Vq,{open:!0,onClose:()=>t(),className:"relative z-30",children:[(0,r.jsx)(z.ZR,{transition:!0,className:"fixed inset-0 bg-gray-500 bg-opacity-25 transition-opacity data-[enter]:duration-300 data-[leave]:duration-200 data-[enter]:ease-out data-[leave]:ease-in"}),(0,r.jsx)("div",{className:"fixed inset-0 w-screen overflow-y-auto",children:(0,r.jsx)("div",{className:"flex min-h-full items-end justify-center p-4 text-center sm:items-center sm:p-0",children:(0,r.jsx)(z.EM,{transition:!0,className:"relative transform overflow-hidden rounded-lg bg-white text-left shadow-xl transition-all data-[closed]:translate-y-4 data-[closed]:opacity-0 data-[enter]:duration-300 data-[leave]:duration-200 data-[enter]:ease-out data-[leave]:ease-in sm:my-8 sm:w-full data-[closed]:sm:translate-y-0 data-[closed]:sm:scale-95",style:{maxWidth:"90vw"},children:(0,r.jsx)("div",{className:"px-4 pb-4 pt-5 sm:p-6 sm:pb-4",style:{backgroundColor:"#0d1011"},children:(0,r.jsxs)("div",{className:"mt-3 text-center sm:ml-4 sm:mt-0 sm:text-left",children:[(0,r.jsxs)(z.$N,{as:"h3",className:"flex justify-between align-middle items-center",children:[(0,r.jsx)("span",{className:"text-lg font-semibold leading-6 text-gray-300",children:a}),null===i?null:(0,r.jsx)("span",{className:"text-sm font-normal text-gray-500",children:i}),(0,r.jsxs)("div",{children:[null===n?null:(0,r.jsx)("a",{download:!0,href:n,children:(0,r.jsx)("button",{type:"button",className:"inline-flex w-full justify-center rounded-md bg-primary-600 px-3 py-2 text-sm font-semibold text-white shadow-sm hover:bg-primary-500 sm:ml-3 sm:w-auto",children:(0,r.jsx)(j.Z,{size:16})})}),(0,r.jsx)("button",{type:"button",onClick:t,className:"inline-flex w-full justify-center rounded-md bg-red-600 px-3 py-2 text-sm font-semibold text-white shadow-sm hover:bg-red-500 sm:ml-3 sm:w-auto",children:(0,r.jsx)(M.Z,{size:16})})]})]}),(0,r.jsx)("div",{className:"mt-2",children:s})]})})})})})]})}var L=e=>{let{title:t,rrd_file:a,width:s="100%",height:n,fallback_video:l}=e,[o,c]=i.useState(!1),d=l?(0,r.jsxs)("div",{children:[(0,r.jsx)("span",{className:"text-primary-400 font-light text-center",children:"Interactive 3D scene not supported on mobile device. Open this webpage on PC for better experience."}),(0,r.jsx)("video",{autoPlay:!0,loop:!0,muted:!0,className:"w-96",children:(0,r.jsx)("source",{src:l,type:"video/mp4"})})]}):(0,r.jsx)("span",{className:"text-primary-400 font-semibold",children:"Interactive 3D scene not supported on mobile device. 
Please open this webpage on PC."});return o?(0,r.jsx)(Z,{onClose:()=>c(!1),title:t,subtitle:"Data: "+a,download:a,children:(0,r.jsx)(_,{fallback:d,children:(0,r.jsx)(y.Z,{rrd:a,width:"100%",height:"85vh"})})}):(0,r.jsxs)("div",{className:"p-2 rounded-md",style:{backgroundColor:"#0d1011"},children:[(0,r.jsxs)("div",{className:"px-2 flex justify-between align-middle items-center pb-2",children:[(0,r.jsx)("span",{className:"font-semibold leading-6 text-gray-300",children:t}),(0,r.jsx)("span",{className:"text-sm font-normal text-gray-500 text-clip",children:a}),(0,r.jsxs)("div",{className:"flex flex-nowrap",children:[(0,r.jsx)("a",{download:!0,href:a,children:(0,r.jsx)("button",{type:"button",className:"inline-flex w-full justify-center rounded-md bg-primary-500 px-3 py-2 text-sm font-semibold text-white shadow-sm hover:bg-primary-600 sm:ml-3 sm:w-auto",children:(0,r.jsx)(j.Z,{size:16})})}),(0,r.jsx)("button",{type:"button",onClick:()=>c(!0),className:"inline-flex w-full justify-center rounded-md bg-teal-500 px-3 py-2 text-sm font-semibold text-white shadow-sm hover:bg-teal-600 sm:ml-3 sm:w-auto",children:(0,r.jsx)(w.Z,{size:16})})]})]}),(0,r.jsx)(C,{children:(0,r.jsx)(_,{fallback:d,children:(0,r.jsx)(y.Z,{rrd:a,width:s,height:n})})})]})},A=a(9298),E=a(3059);let O=i.forwardRef((e,t)=>{let{className:a,disabled:s,isLoading:i,variant:n="primary",isDarkBg:l=!1,icon:o,classNames:c,...d}=e,m=i||s;return(0,r.jsxs)("button",{ref:t,type:"button",disabled:m,className:(0,p.cn)("inline-flex items-center justify-center rounded font-medium","focus-visible:ring-primary-500 focus:outline-none focus-visible:ring","shadow-sm","transition-colors duration-75","min-h-[28px] min-w-[28px] p-1 md:min-h-[34px] md:min-w-[34px] md:p-2",["primary"===n&&["bg-primary-500 text-white","border-primary-600 border","hover:bg-primary-600 hover:text-white","active:bg-primary-700","disabled:bg-primary-700"],"outline"===n&&["text-primary-500","border-primary-500 border","hover:bg-primary-50 active:bg-primary-100 disabled:bg-primary-100",l&&"hover:bg-gray-900 active:bg-gray-800 disabled:bg-gray-800"],"ghost"===n&&["text-primary-500","shadow-none","hover:bg-primary-50 active:bg-primary-100 disabled:bg-primary-100",l&&"hover:bg-gray-900 active:bg-gray-800 disabled:bg-gray-800"],"light"===n&&["bg-white text-gray-700","border border-gray-300","hover:text-dark hover:bg-gray-100","active:bg-white/80 disabled:bg-gray-200"],"dark"===n&&["bg-gray-900 text-white","border border-gray-600","hover:bg-gray-800 active:bg-gray-700 disabled:bg-gray-700"]],"disabled:cursor-not-allowed",i&&"relative text-transparent transition-none hover:text-transparent disabled:cursor-wait",a),...d,children:[i&&(0,r.jsx)("div",{className:(0,p.cn)("absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2",{"text-white":["primary","dark"].includes(n),"text-black":["light"].includes(n),"text-primary-500":["outline","ghost"].includes(n)}),children:(0,r.jsx)(E.Cd,{className:"animate-spin"})}),o&&(0,r.jsx)(o,{size:"1em",className:(0,p.cn)(null==c?void 0:c.icon)}),d.content?d.content:null]})});var S=e=>{let{title:t,rrd_file:a,width:s="100%"}=e,[n,l]=i.useState(!1);return n?(0,r.jsx)(C,{children:(0,r.jsx)(Z,{onClose:()=>l(!1),title:t,subtitle:"Data: "+a,download:a,children:(0,r.jsx)(y.Z,{rrd:a,width:s,height:"85vh"})})}):(0,r.jsx)(O,{icon:A.Z,content:t,onClick:()=>l(!0),type:"button",variant:"outline"})};function 
V(){let[e,t]=o(),a="dark"===e?"text-gray-300":"text-gray-700",i="dark"===e?"bg-dark":"bg-white",n="dark"===e?"bg-dark/70":"bg-white/70",l="dark"===e?"bg-neutral-700":"bg-gray-100",c="dark"===e?"text-primary-500":"text-primary-600";return(0,r.jsxs)("main",{children:[(0,r.jsxs)("section",{className:(0,s.Z)(i,a,"relative flex items-center justify-center h-screen overflow-hidden"),children:[(0,r.jsxs)("div",{className:"absolute top-6 right-4 z-20",children:[(0,r.jsx)("span",{children:"Light Mode "}),(0,r.jsx)(d,{state:"light"===e,switch_state:t})]}),(0,r.jsxs)("div",{className:"layout z-20 relative flex min-h-screen flex-col items-center justify-center py-12 text-center",children:[(0,r.jsxs)("h1",{className:"mt-4 text-5xl",children:["MAC-VO: "," ",(0,r.jsx)("span",{className:c,children:"M"}),"etrics-",(0,r.jsx)("span",{className:c,children:"A"}),"ware "," ",(0,r.jsx)("span",{className:c,children:"C"}),"ovariance "," ","for Learning-based Stereo"," ",(0,r.jsx)("span",{className:c,children:"V"}),"isual "," ",(0,r.jsx)("span",{className:c,children:"O"}),"dometry"]}),(0,r.jsxs)("div",{className:"container py-6",children:[(0,r.jsxs)("span",{className:"text-lg font-semibold",children:["Yuheng Qiu*, Yutian Chen*, Zihao Zhang, Wenshan Wang, Sebastian Scherer",(0,r.jsx)("br",{})]}),(0,r.jsx)("span",{className:"text-lg",children:"Carnegie Mellon University"})]}),(0,r.jsxs)("div",{className:"container flex flex-row items-center space-x-8 justify-center text-lg",children:[(0,r.jsx)(v,{className:"mt-6",href:"/components",variant:e,size:"large",children:"GitHub Repo"}),(0,r.jsx)(v,{className:"mt-6",href:"https://arxiv.org/abs/2409.09479",variant:e,size:"large",children:"arXiv Page"}),(0,r.jsx)(v,{className:"mt-6",href:"/video/MACVO.mp4",variant:e,size:"large",children:"Video"}),(0,r.jsx)(v,{className:"mt-6",href:"/content/Docs",variant:e,size:"large",children:"Documentation"})]})]}),(0,r.jsx)("div",{className:(0,s.Z)("absolute w-auto min-w-full min-h-full max-w-none z-10 backdrop-blur-sm",n)}),(0,r.jsx)("div",{className:"absolute bottom-4 left-4 z-20",children:(0,r.jsx)("p",{children:"* Equal Contribution."})}),(0,r.jsxs)("video",{autoPlay:!0,loop:!0,muted:!0,className:"absolute w-auto min-w-full min-h-full max-w-none z-0",children:[(0,r.jsx)("source",{src:"/video/SLAM_on_Moon_with_cov.mp4",type:"video/mp4"}),"Your browser does not support the video tag."]})]}),(0,r.jsx)("section",{className:(0,s.Z)(l,a),children:(0,r.jsxs)("div",{className:"layout py-12",children:[(0,r.jsx)("h2",{className:"text-center pb-4",children:"Abstract"}),(0,r.jsx)("p",{className:"text-pretty",children:"We propose MAC-VO, a novel learning-based stereo VO that leverages the learned metrics-aware matching uncertainty for dual purposes: selecting keypoint and weighing the residual in pose graph optimization. Compared to traditional geometric methods prioritizing texture-affluent features like edges, our keypoint selector employs the learned uncertainty to filter out the low-quality features based on global inconsistency. In contrast to the learning-based algorithms that rely on the scale-agnostic weight matrix, we design a metrics-aware spatial covariance model to capture the spatial information during keypoint registration. Integrating this covariance model into pose graph optimization enhances the robustness and reliability of pose estimation, particularly in challenging environments with varying illumination, feature density, and motion patterns. 
On public benchmark datasets, MAC-VO outperforms existing VO algorithms, even some SLAM algorithms in challenging environments. The covariance-aware framework also provides valuable information about the reliability of the estimated poses, which can benefit decision-making for autonomous systems."})]})}),(0,r.jsx)("section",{className:(0,s.Z)(i,a),children:(0,r.jsxs)("div",{className:"layout py-12",children:[(0,r.jsx)("video",{controls:!0,className:"rounded-xl my-8",children:(0,r.jsx)("source",{type:"video/mp4",src:"/video/MACVO.mp4"})}),(0,r.jsx)("h2",{className:"pb-4",children:"Methods"}),(0,r.jsx)("h3",{className:"pt-4",children:"System Pipeline"}),(0,r.jsx)(m,{img_src:"/images/Methods.png",caption:"MAC-VO System pipeline. First, we use a shared matching network to estimate the depth, flow, and corresponding uncertainty. Secondly, we employ the learned uncertainty to filter out unreliable features. Lastly, we optimize the pose with the metrics-aware covariance model.",isDark:"dark"===e,idx:1}),(0,r.jsx)("h3",{className:"pt-4",children:"Metrics-Aware Spatial Covariance"}),(0,r.jsx)(m,{img_src:"/images/SpatialCovariance.png",caption:(0,r.jsxs)("span",{children:["a) Depth uncertainty estimated with the presence of matching uncertainty. b) Projecting depth and matching uncertainty on sensor plane to 3D space. c) Residual ",(0,r.jsx)(x,{text:"$\\mathcal{L}_i$"})," for pose graph optimization."]}),isDark:"dark"===e,idx:2})]})}),(0,r.jsx)("section",{className:(0,s.Z)(l,a),children:(0,r.jsxs)("div",{className:"layout py-12",children:[(0,r.jsx)("h2",{className:"pb-4",children:"Qualitative Results"}),(0,r.jsx)("div",{className:"layout py-4",children:(0,r.jsx)(L,{title:"TartanAir Abandon Factory 1",rrd_file:"https://mac-vo.github.io/rerun/TartanAir_AbandonFac_001.rrd",fallback_video:"/video/Rotate_TartanAir1.mp4",height:"50vh"})}),(0,r.jsx)("hr",{}),(0,r.jsx)("h4",{className:"py-4",children:"TartnaAir v2 Dataset"}),(0,r.jsx)(S,{title:"Map Visualization for TartanAirv2, Trajectory E002 ",rrd_file:"https://mac-vo.github.io/rerun/tensor_map_vis.rrd"}),(0,r.jsx)("h4",{className:"py-4",children:"EuRoC Dataset"}),(0,r.jsx)(S,{title:"Map Visualization for EuRoC V102",rrd_file:"https://mac-vo.github.io/rerun/EuRoC_V102_Map.rrd"}),(0,r.jsx)("h4",{className:"py-4",children:"KITTI Dataset"}),(0,r.jsx)(S,{title:"Map Visualization for KITTI Odometry 07",rrd_file:"https://mac-vo.github.io/rerun/KITTI_07_Map.rrd"})]})})]})}},7112:function(e,t,a){"use strict";a.d(t,{cn:function(){return i}});var r=a(7281),s=a(5834);function i(){for(var e=arguments.length,t=Array(e),a=0;a{var e;let a=(e="darkMode",window.localStorage.getItem(e));null===a?t("dark"):t(a)},[]),(0,i.useEffect)(()=>{var t;t="darkMode",window.localStorage.setItem(t,e)},[e]),[e,()=>{t(e=>"light"===e?"dark":"light")}]},c=a(4395),d=function(e){let{state:t,switch_state:a,size:i="normal"}=e;return(0,r.jsx)(c.rs,{checked:t,onChange:a,className:(0,s.Z)("group inline-flex items-center rounded-full bg-gray-600 transition data-[checked]:bg-primary-500","normal"===i?"h-6 w-11":"h-4 w-9"),children:(0,r.jsx)("span",{className:(0,s.Z)("normal"===i?"size-4 group-data-[checked]:translate-x-6":"size-3 group-data-[checked]:translate-x-5","translate-x-1 rounded-full bg-white transition")})})},m=e=>{let{img_src:t,caption:a,isDark:n,idx:l}=e,o=n?"text-gray-400":"text-gray-600",[c,m]=(0,i.useState)(n?"invert":"invert-0");return(0,i.useEffect)(()=>{m(n?"invert":"invert-0")},[n]),(0,r.jsxs)("figure",{className:"flex flex-col items-center 
justify-center",children:[(0,r.jsx)("img",{src:t,className:(0,s.Z)("w-full h-auto rounded-md p-2 transition",c,"invert"===c?"bg-gray-200":"")}),n?(0,r.jsxs)("div",{className:(0,s.Z)(o,"text-sm","invert"===c?"text-primary-500":""),children:[(0,r.jsxs)("span",{children:["Color Inversion ","invert"===c?"ON":"OFF"," "]}),(0,r.jsx)(d,{state:"invert"===c,switch_state:()=>m("invert"===c?"invert-0":"invert"),size:"small"})]}):null,(0,r.jsxs)("figcaption",{className:(0,s.Z)(o,"mt-2","font-light"),children:["Figure ",l,". ",a]})]})},h=a(7894),u=a.n(h);function x(e){let{text:t,...a}=e,s=(0,i.useRef)();return(0,i.useEffect)(()=>{s.current&&u()(s.current,{delimiters:[{left:"$$",right:"$$",display:!0},{left:"$",right:"$",display:!1}]})},[t]),(0,r.jsx)("span",{ref:s,...a,children:t})}a(7297);var p=a(7112),g=a(4414);let f=i.forwardRef((e,t)=>{let{children:a,href:s,openNewTab:i,className:n,nextLinkProps:l,...o}=e;return(void 0!==i?i:s&&!s.startsWith("/")&&!s.startsWith("#"))?(0,r.jsx)("a",{ref:t,target:"_blank",rel:"noopener noreferrer",href:s,...o,className:(0,p.cn)("cursor-newtab",n),children:a}):(0,r.jsx)(g.default,{href:s,ref:t,className:n,...o,...l,children:a})}),b=i.forwardRef((e,t)=>{let{children:a,className:s,...i}=e;return(0,r.jsx)(f,{ref:t,...i,className:(0,p.cn)("animated-underline custom-link inline-flex items-center font-medium","focus-visible:ring-primary-500 focus:outline-none focus-visible:rounded focus-visible:ring focus-visible:ring-offset-2","border-dark border-b border-dotted hover:border-black/0",s),children:a})});function v(e){let{children:t,className:a,direction:s="right",as:i,icon:n=null,...l}=e;return(0,r.jsxs)(i||b,{...l,className:(0,p.cn)("group gap-[0.25em]","left"===s&&"flex-row-reverse",a),children:[n,(0,r.jsx)("span",{children:t}),(0,r.jsxs)("svg",{viewBox:"0 0 16 16",height:"1em",width:"1em",fill:"none",xmlns:"http://www.w3.org/2000/svg",className:(0,p.cn)("relative","transition-transform duration-200","right"===s?"motion-safe:-translate-x-1":"rotate-180","group-hover:translate-x-0"),children:[(0,r.jsx)("path",{fill:"currentColor",d:"M7.28033 3.21967C6.98744 2.92678 6.51256 2.92678 6.21967 3.21967C5.92678 3.51256 5.92678 3.98744 6.21967 4.28033L7.28033 3.21967ZM11 8L11.5303 8.53033C11.8232 8.23744 11.8232 7.76256 11.5303 7.46967L11 8ZM6.21967 11.7197C5.92678 12.0126 5.92678 12.4874 6.21967 12.7803C6.51256 13.0732 6.98744 13.0732 7.28033 12.7803L6.21967 11.7197ZM6.21967 4.28033L10.4697 8.53033L11.5303 7.46967L7.28033 3.21967L6.21967 4.28033ZM10.4697 7.46967L6.21967 11.7197L7.28033 12.7803L11.5303 8.53033L10.4697 7.46967Z"}),(0,r.jsx)("path",{stroke:"currentColor",d:"M1.75 8H11",strokeWidth:"1.5",strokeLinecap:"round",className:(0,p.cn)("origin-left transition-all duration-200","opacity-0 motion-safe:-translate-x-1","group-hover:translate-x-0 group-hover:opacity-100")})]})]})}var y=a(3068),j=a(8797),w=a(9723),N=a(2725);let k=e=>(0,r.jsx)(i.Fragment,{children:e.children});var C=(0,N.default)(()=>Promise.resolve(k),{ssr:!1}),_=e=>{let{children:t,fallback:a}=e,r=window.innerWidth,[s,n]=(0,i.useState)(r<=800);return(0,i.useEffect)(()=>{let e=()=>n(window.innerWidth<=800);return window.addEventListener("resize",e),()=>window.removeEventListener("resize",e)},[]),s?a:t},M=a(7155),z=a(1318);function Z(e){let{onClose:t,title:a,children:s=null,subtitle:i=null,download:n=null}=e;return(0,r.jsxs)(M.Vq,{open:!0,onClose:()=>t(),className:"relative z-30",children:[(0,r.jsx)(M.ZR,{transition:!0,className:"fixed inset-0 bg-gray-500 bg-opacity-25 transition-opacity data-[enter]:duration-300 
data-[leave]:duration-200 data-[enter]:ease-out data-[leave]:ease-in"}),(0,r.jsx)("div",{className:"fixed inset-0 w-screen overflow-y-auto",children:(0,r.jsx)("div",{className:"flex min-h-full items-end justify-center p-4 text-center sm:items-center sm:p-0",children:(0,r.jsx)(M.EM,{transition:!0,className:"relative transform overflow-hidden rounded-lg bg-white text-left shadow-xl transition-all data-[closed]:translate-y-4 data-[closed]:opacity-0 data-[enter]:duration-300 data-[leave]:duration-200 data-[enter]:ease-out data-[leave]:ease-in sm:my-8 sm:w-full data-[closed]:sm:translate-y-0 data-[closed]:sm:scale-95",style:{maxWidth:"90vw"},children:(0,r.jsx)("div",{className:"px-4 pb-4 pt-5 sm:p-6 sm:pb-4",style:{backgroundColor:"#0d1011"},children:(0,r.jsxs)("div",{className:"mt-3 text-center sm:ml-4 sm:mt-0 sm:text-left",children:[(0,r.jsxs)(M.$N,{as:"h3",className:"flex justify-between align-middle items-center",children:[(0,r.jsx)("span",{className:"text-lg font-semibold leading-6 text-gray-300",children:a}),null===i?null:(0,r.jsx)("span",{className:"text-sm font-normal text-gray-500",children:i}),(0,r.jsxs)("div",{children:[null===n?null:(0,r.jsx)("a",{download:!0,href:n,children:(0,r.jsx)("button",{type:"button",className:"inline-flex w-full justify-center rounded-md bg-primary-600 px-3 py-2 text-sm font-semibold text-white shadow-sm hover:bg-primary-500 sm:ml-3 sm:w-auto",children:(0,r.jsx)(j.Z,{size:16})})}),(0,r.jsx)("button",{type:"button",onClick:t,className:"inline-flex w-full justify-center rounded-md bg-red-600 px-3 py-2 text-sm font-semibold text-white shadow-sm hover:bg-red-500 sm:ml-3 sm:w-auto",children:(0,r.jsx)(z.Z,{size:16})})]})]}),(0,r.jsx)("div",{className:"mt-2",children:s})]})})})})})]})}var L=e=>{let{title:t,rrd_file:a,width:s="100%",height:n,fallback_video:l}=e,[o,c]=i.useState(!1),d=l?(0,r.jsxs)("div",{children:[(0,r.jsx)("span",{className:"text-primary-400 font-light text-center",children:"Interactive 3D scene not supported on mobile device. Open this webpage on PC for better experience."}),(0,r.jsx)("video",{autoPlay:!0,loop:!0,muted:!0,className:"w-96",children:(0,r.jsx)("source",{src:l,type:"video/mp4"})})]}):(0,r.jsx)("span",{className:"text-primary-400 font-semibold",children:"Interactive 3D scene not supported on mobile device. 
Please open this webpage on PC."});return o?(0,r.jsx)(Z,{onClose:()=>c(!1),title:t,subtitle:"Data: "+a,download:a,children:(0,r.jsx)(_,{fallback:d,children:(0,r.jsx)(y.Z,{rrd:a,width:"100%",height:"85vh"})})}):(0,r.jsxs)("div",{className:"p-2 rounded-md",style:{backgroundColor:"#0d1011"},children:[(0,r.jsxs)("div",{className:"px-2 flex justify-between align-middle items-center pb-2",children:[(0,r.jsx)("span",{className:"font-semibold leading-6 text-gray-300",children:t}),(0,r.jsx)("span",{className:"text-sm font-normal text-gray-500 text-clip",children:a}),(0,r.jsxs)("div",{className:"flex flex-nowrap",children:[(0,r.jsx)("a",{download:!0,href:a,children:(0,r.jsx)("button",{type:"button",className:"inline-flex w-full justify-center rounded-md bg-primary-500 px-3 py-2 text-sm font-semibold text-white shadow-sm hover:bg-primary-600 sm:ml-3 sm:w-auto",children:(0,r.jsx)(j.Z,{size:16})})}),(0,r.jsx)("button",{type:"button",onClick:()=>c(!0),className:"inline-flex w-full justify-center rounded-md bg-teal-500 px-3 py-2 text-sm font-semibold text-white shadow-sm hover:bg-teal-600 sm:ml-3 sm:w-auto",children:(0,r.jsx)(w.Z,{size:16})})]})]}),(0,r.jsx)(C,{children:(0,r.jsx)(_,{fallback:d,children:(0,r.jsx)(y.Z,{rrd:a,width:s,height:n})})})]})},A=a(9298),O=a(3059);let S=i.forwardRef((e,t)=>{let{className:a,disabled:s,isLoading:i,variant:n="primary",isDarkBg:l=!1,icon:o,classNames:c,...d}=e,m=i||s;return(0,r.jsxs)("button",{ref:t,type:"button",disabled:m,className:(0,p.cn)("inline-flex items-center justify-center rounded font-medium","focus-visible:ring-primary-500 focus:outline-none focus-visible:ring","shadow-sm","transition-colors duration-75","min-h-[28px] min-w-[28px] p-1 md:min-h-[34px] md:min-w-[34px] md:p-2",["primary"===n&&["bg-primary-500 text-white","border-primary-600 border","hover:bg-primary-600 hover:text-white","active:bg-primary-700","disabled:bg-primary-700"],"outline"===n&&["text-primary-500","border-primary-500 border","hover:bg-primary-50 active:bg-primary-100 disabled:bg-primary-100",l&&"hover:bg-gray-900 active:bg-gray-800 disabled:bg-gray-800"],"ghost"===n&&["text-primary-500","shadow-none","hover:bg-primary-50 active:bg-primary-100 disabled:bg-primary-100",l&&"hover:bg-gray-900 active:bg-gray-800 disabled:bg-gray-800"],"light"===n&&["bg-white text-gray-700","border border-gray-300","hover:text-dark hover:bg-gray-100","active:bg-white/80 disabled:bg-gray-200"],"dark"===n&&["bg-gray-900 text-white","border border-gray-600","hover:bg-gray-800 active:bg-gray-700 disabled:bg-gray-700"]],"disabled:cursor-not-allowed",i&&"relative text-transparent transition-none hover:text-transparent disabled:cursor-wait",a),...d,children:[i&&(0,r.jsx)("div",{className:(0,p.cn)("absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2",{"text-white":["primary","dark"].includes(n),"text-black":["light"].includes(n),"text-primary-500":["outline","ghost"].includes(n)}),children:(0,r.jsx)(O.Cd,{className:"animate-spin"})}),o&&(0,r.jsx)(o,{size:"1em",className:(0,p.cn)(null==c?void 0:c.icon)}),d.content?d.content:null]})});var E=e=>{let{title:t,rrd_file:a,width:s="100%"}=e,[n,l]=i.useState(!1);return n?(0,r.jsx)(C,{children:(0,r.jsx)(Z,{onClose:()=>l(!1),title:t,subtitle:"Data: "+a,download:a,children:(0,r.jsx)(y.Z,{rrd:a,width:s,height:"85vh"})})}):(0,r.jsx)(S,{icon:A.Z,content:t,onClick:()=>l(!0),type:"button",variant:"outline"})};function 
V(){let[e,t]=o(),a="dark"===e?"text-gray-300":"text-gray-700",i="dark"===e?"bg-dark":"bg-white",n="dark"===e?"bg-dark/70":"bg-white/70",l="dark"===e?"bg-neutral-700":"bg-gray-100",c="dark"===e?"text-primary-500":"text-primary-600";return(0,r.jsxs)("main",{children:[(0,r.jsxs)("section",{className:(0,s.Z)(i,a,"relative flex items-center justify-center h-screen overflow-hidden"),children:[(0,r.jsxs)("div",{className:"absolute top-6 right-4 z-20",children:[(0,r.jsx)("span",{children:"Light Mode "}),(0,r.jsx)(d,{state:"light"===e,switch_state:t})]}),(0,r.jsxs)("div",{className:"layout z-20 relative flex min-h-screen flex-col items-center justify-center py-12 text-center",children:[(0,r.jsxs)("h1",{className:"mt-4 text-5xl",children:["MAC-VO: "," ",(0,r.jsx)("span",{className:c,children:"M"}),"etrics-",(0,r.jsx)("span",{className:c,children:"A"}),"ware "," ",(0,r.jsx)("span",{className:c,children:"C"}),"ovariance "," ","for Learning-based Stereo"," ",(0,r.jsx)("span",{className:c,children:"V"}),"isual "," ",(0,r.jsx)("span",{className:c,children:"O"}),"dometry"]}),(0,r.jsxs)("div",{className:"container py-6",children:[(0,r.jsxs)("span",{className:"text-lg font-semibold",children:["Yuheng Qiu*, Yutian Chen*, Zihao Zhang, Wenshan Wang, Sebastian Scherer",(0,r.jsx)("br",{})]}),(0,r.jsx)("span",{className:"text-lg",children:"Carnegie Mellon University"})]}),(0,r.jsxs)("div",{className:"container flex flex-row items-center space-x-8 justify-center text-lg",children:[(0,r.jsx)(v,{className:"mt-6",href:"https://github.com/MAC-VO/MAC-VO",variant:e,size:"large",children:"GitHub Repo"}),(0,r.jsx)(v,{className:"mt-6",href:"https://arxiv.org/abs/2409.09479",variant:e,size:"large",children:"arXiv Page"}),(0,r.jsx)(v,{className:"mt-6",href:"/content/Docs",variant:e,size:"large",children:"Documentation"})]})]}),(0,r.jsx)("div",{className:(0,s.Z)("absolute w-auto min-w-full min-h-full max-w-none z-10 backdrop-blur-sm",n)}),(0,r.jsx)("div",{className:"absolute bottom-4 left-4 z-20",children:(0,r.jsx)("p",{children:"* Equal Contribution."})}),(0,r.jsxs)("video",{autoPlay:!0,loop:!0,muted:!0,className:"absolute w-auto min-w-full min-h-full max-w-none z-0",children:[(0,r.jsx)("source",{src:"/video/SLAM_on_Moon_with_cov.mp4",type:"video/mp4"}),"Your browser does not support the video tag."]})]}),(0,r.jsx)("section",{className:(0,s.Z)(l,a),children:(0,r.jsxs)("div",{className:"layout py-12",children:[(0,r.jsx)("h2",{className:"text-center pb-4",children:"Abstract"}),(0,r.jsx)("p",{className:"text-pretty",children:"We propose MAC-VO, a novel learning-based stereo VO that leverages the learned metrics-aware matching uncertainty for dual purposes: selecting keypoint and weighing the residual in pose graph optimization. Compared to traditional geometric methods prioritizing texture-affluent features like edges, our keypoint selector employs the learned uncertainty to filter out the low-quality features based on global inconsistency. In contrast to the learning-based algorithms that rely on the scale-agnostic weight matrix, we design a metrics-aware spatial covariance model to capture the spatial information during keypoint registration. Integrating this covariance model into pose graph optimization enhances the robustness and reliability of pose estimation, particularly in challenging environments with varying illumination, feature density, and motion patterns. On public benchmark datasets, MAC-VO outperforms existing VO algorithms, even some SLAM algorithms in challenging environments. 
The covariance-aware framework also provides valuable information about the reliability of the estimated poses, which can benefit decision-making for autonomous systems."})]})}),(0,r.jsx)("section",{className:(0,s.Z)(i,a),children:(0,r.jsxs)("div",{className:"layout py-12",children:[(0,r.jsx)("h2",{children:"Supplimentary Video"}),(0,r.jsx)("video",{controls:!0,className:"rounded-xl my-8",children:(0,r.jsx)("source",{type:"video/mp4",src:"/video/MACVO.mp4"})}),(0,r.jsx)("h2",{className:"pb-4",children:"Methods"}),(0,r.jsx)("h3",{className:"pt-4",children:"System Pipeline"}),(0,r.jsx)(m,{img_src:"/images/Methods.png",caption:"MAC-VO System pipeline. First, we use a shared matching network to estimate the depth, flow, and corresponding uncertainty. Secondly, we employ the learned uncertainty to filter out unreliable features. Lastly, we optimize the pose with the metrics-aware covariance model.",isDark:"dark"===e,idx:1}),(0,r.jsx)("h3",{className:"pt-4",children:"Metrics-Aware Spatial Covariance"}),(0,r.jsx)(m,{img_src:"/images/SpatialCovariance.png",caption:(0,r.jsxs)("span",{children:["a) Depth uncertainty estimated with the presence of matching uncertainty. b) Projecting depth and matching uncertainty on sensor plane to 3D space. c) Residual ",(0,r.jsx)(x,{text:"$\\mathcal{L}_i$"})," for pose graph optimization."]}),isDark:"dark"===e,idx:2})]})}),(0,r.jsx)("section",{className:(0,s.Z)(l,a),children:(0,r.jsxs)("div",{className:"layout py-12",children:[(0,r.jsx)("h2",{className:"pb-4",children:"Qualitative Results"}),(0,r.jsx)("div",{className:"layout py-4",children:(0,r.jsx)(L,{title:"TartanAir Abandon Factory 1",rrd_file:"https://mac-vo.github.io/rerun/TartanAir_AbandonFac_001.rrd",fallback_video:"/video/Rotate_TartanAir1.mp4",height:"50vh"})}),(0,r.jsx)("hr",{}),(0,r.jsx)("h4",{className:"py-4",children:"TartnaAir v2 Dataset"}),(0,r.jsx)(E,{title:"Map Visualization for TartanAirv2, Trajectory E002 ",rrd_file:"https://mac-vo.github.io/rerun/tensor_map_vis.rrd"}),(0,r.jsx)("h4",{className:"py-4",children:"EuRoC Dataset"}),(0,r.jsx)(E,{title:"Map Visualization for EuRoC V102",rrd_file:"https://mac-vo.github.io/rerun/EuRoC_V102_Map.rrd"}),(0,r.jsx)("h4",{className:"py-4",children:"KITTI Dataset"}),(0,r.jsx)(E,{title:"Map Visualization for KITTI Odometry 07",rrd_file:"https://mac-vo.github.io/rerun/KITTI_07_Map.rrd"})]})})]})}},7112:function(e,t,a){"use strict";a.d(t,{cn:function(){return i}});var r=a(7281),s=a(5834);function i(){for(var e=arguments.length,t=Array(e),a=0;a
Light Mode

Modules Config Specification

Motion Model

+
Light Mode

Modules Config Specification

Motion Model

  • Ground Truth + Random Noise

    @@ -226,4 +226,4 @@

    Frontend

    # ...
  • -
\ No newline at end of file +
\ No newline at end of file
diff --git a/docs/content/CustomizeConfig.html b/docs/content/CustomizeConfig.html
index a96e7f3..7d231cf 100644
--- a/docs/content/CustomizeConfig.html
+++ b/docs/content/CustomizeConfig.html
@@ -1,4 +1,4 @@
-
Light Mode

Config Customization

Config Syntax & Custom Tags

+
Light Mode

Config Customization

Config Syntax & Custom Tags

We use the YAML file format for configs, with some slight enhancements - the !include, !include_dataset, and !flatten tags.

When using Utility.Config.load_config to read a YAML file, the parser takes the following actions when the aforementioned tags are encountered:
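For illustration, a custom !include-style tag can be registered on a standard YAML loader roughly as in the sketch below. This uses PyYAML and a hypothetical config path; it is not the actual Utility.Config.load_config implementation.

# Illustration only: a PyYAML loader with a custom `!include` constructor.
# The class name, tag handling, and the config path are assumptions, not the
# project's actual Utility.Config.load_config implementation.
import pathlib
import yaml


class ConfigLoader(yaml.SafeLoader):
    def __init__(self, stream):
        # Remember the directory of the current file so `!include` paths can
        # be resolved relative to it.
        self._root = pathlib.Path(getattr(stream, "name", ".")).resolve().parent
        super().__init__(stream)


def _include(loader: "ConfigLoader", node: yaml.Node):
    # Parse the referenced YAML file and splice its content into the tree.
    target = loader._root / str(loader.construct_scalar(node))
    with open(target, "r") as f:
        return yaml.load(f, ConfigLoader)


ConfigLoader.add_constructor("!include", _include)

if __name__ == "__main__":
    with open("Config/example.yaml", "r") as f:  # hypothetical path
        config = yaml.load(f, ConfigLoader)
    print(config)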

    @@ -36,4 +36,4 @@

    Module and Interface

    Interface.instantiate("implementation_class_name", *args, **kwargs)
     

    Currently Available Modules & Config Spec

    -

    See ConfigSpec.md.

\ No newline at end of file +

See ConfigSpec.md.

\ No newline at end of file
diff --git a/docs/content/CustomizeDataLoader.html b/docs/content/CustomizeDataLoader.html
index 472c850..e34d30b 100644
--- a/docs/content/CustomizeDataLoader.html
+++ b/docs/content/CustomizeDataLoader.html
@@ -1,4 +1,4 @@
-
Light Mode

Dataset Customization

Create a new Dataset (Sequence)

+
Light Mode

Dataset Customization

Create a new Dataset (Sequence)

All DIY datasets should inherit from GenericSequence. Create a new file under ./DataLoader.

The GenericSequence class is an abstract base class designed to manage datasets in a structured way, enabling easy extension and integration with various types of sequence data. Below is a detailed guide on how to implement and use this class for your datasets.

from .SequenceBase import GenericSequence
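For illustration, a minimal subclass might look like the following sketch. The constructor argument and the __len__/__getitem__ convention are assumptions in the usual PyTorch-dataset style, not the documented GenericSequence API; check SequenceBase for the exact abstract methods to override.

# Hypothetical sketch only - names and returned keys are placeholders.
import pathlib

from .SequenceBase import GenericSequence  # as imported above


class MyStereoSequence(GenericSequence):
    """Hypothetical sequence over a folder of stereo frames (sketch only)."""

    def __init__(self, root: str) -> None:
        super().__init__()
        base = pathlib.Path(root)
        self.left = sorted(base.glob("image_left/*.png"))
        self.right = sorted(base.glob("image_right/*.png"))

    def __len__(self) -> int:
        return len(self.left)

    def __getitem__(self, index: int) -> dict:
        # A real implementation would load images/tensors here; the dict keys
        # below are placeholders, not the project's actual frame schema.
        return {"left": self.left[index], "right": self.right[index], "index": index}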
@@ -69,4 +69,4 @@ 

Usage

for batch in dataloader:
    print(batch)
-
\ No newline at end of file +
\ No newline at end of file
diff --git a/docs/content/CustomizeOptimizer.html b/docs/content/CustomizeOptimizer.html
index 7741797..33a9a4c 100644
--- a/docs/content/CustomizeOptimizer.html
+++ b/docs/content/CustomizeOptimizer.html
@@ -1,4 +1,4 @@
-
Light Mode

Extending Optimizer in MAC-VO

The IOptimizer Interface

+
Light Mode

Extending Optimizer in MAC-VO

The IOptimizer Interface

IOptimizer is the interface for the optimizer used in MAC-VO. It is the most complex interface in this project since it allows running any optimizer in sequential/parallel mode according to the config.

The Optimizer runs in two modes, but the user only needs to implement a single interface, which contains four methods and three data (message) types.
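As a rough illustration of the "single implementation, two run modes" idea, consider the sketch below. All names are placeholders; this is not the actual IOptimizer method set or its message types.

# Placeholder names throughout - NOT the actual IOptimizer interface, just an
# illustration of running the same optimize step inline or in a worker process.
import multiprocessing as mp


class ToyOptimizer:
    def optimize(self, graph_msg: dict) -> dict:
        # Stand-in for one pose-graph optimization step.
        return {"poses": graph_msg["poses"], "converged": True}


def run_sequential(optimizer: ToyOptimizer, msg: dict) -> dict:
    # Sequential mode: call the step directly in the caller's process.
    return optimizer.optimize(msg)


def _worker(conn) -> None:
    optimizer = ToyOptimizer()
    while (msg := conn.recv()) is not None:
        conn.send(optimizer.optimize(msg))


def run_parallel(msg: dict) -> dict:
    # Parallel mode: the same step runs in a worker process; only messages
    # cross the process boundary.
    parent_conn, child_conn = mp.Pipe()
    worker = mp.Process(target=_worker, args=(child_conn,))
    worker.start()
    parent_conn.send(msg)
    result = parent_conn.recv()
    parent_conn.send(None)  # shut the worker down
    worker.join()
    return result


if __name__ == "__main__":
    msg = {"poses": [[0.0, 0.0, 0.0]]}
    assert run_sequential(ToyOptimizer(), msg) == run_parallel(msg)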

Methods

@@ -105,4 +105,4 @@

The ITransferable Interface

""" def move_self_to_local(self: S) -> S: ... def release(self) -> None: ... -
\ No newline at end of file +
\ No newline at end of file
diff --git a/docs/content/Docs.html b/docs/content/Docs.html
index dd71692..50bdb86 100644
--- a/docs/content/Docs.html
+++ b/docs/content/Docs.html
@@ -1,4 +1,4 @@
-
Light Mode

MAC-VO Documentation

Configuration

+
Light Mode

MAC-VO Documentation

Configuration

@@ -7,4 +7,4 @@

Extending MAC-VO

  • Customizing Config
  • Customizing DataLoader
  • Customizing Optimizer
  • -
    \ No newline at end of file +
\ No newline at end of file
diff --git a/docs/index.html b/docs/index.html
index ec67a36..2081772 100644
--- a/docs/index.html
+++ b/docs/index.html
@@ -1 +1 @@
-MAC-VO: Metric-Aware Covariance for Learning-based Stereo Visual Odometry
    Light Mode

    MAC-VO: Metrics-Aware Covariance for Learning-based Stereo Visual Odometry

    Yuheng Qiu*, Yutian Chen*, Zihao Zhang, Wenshan Wang, Sebastian Scherer
    Carnegie Mellon University

    * Equal Contribution.

    Abstract

    We propose MAC-VO, a novel learning-based stereo VO that leverages the learned metrics-aware matching uncertainty for dual purposes: selecting keypoint and weighing the residual in pose graph optimization. Compared to traditional geometric methods prioritizing texture-affluent features like edges, our keypoint selector employs the learned uncertainty to filter out the low-quality features based on global inconsistency. In contrast to the learning-based algorithms that rely on the scale-agnostic weight matrix, we design a metrics-aware spatial covariance model to capture the spatial information during keypoint registration. Integrating this covariance model into pose graph optimization enhances the robustness and reliability of pose estimation, particularly in challenging environments with varying illumination, feature density, and motion patterns. On public benchmark datasets, MAC-VO outperforms existing VO algorithms, even some SLAM algorithms in challenging environments. The covariance-aware framework also provides valuable information about the reliability of the estimated poses, which can benefit decision-making for autonomous systems.

    Methods

    System Pipeline

    Figure 1. MAC-VO System pipeline. First, we use a shared matching network to estimate the depth, flow, and corresponding uncertainty. Secondly, we employ the learned uncertainty to filter out unreliable features. Lastly, we optimize the pose with the metrics-aware covariance model.

    Metrics-Aware Spatial Covariance

    Figure 2. a) Depth uncertainty estimated with the presence of matching uncertainty. b) Projecting depth and matching uncertainty on sensor plane to 3D space. c) Residual $\mathcal{L}_i$ for pose graph optimization.

    Qualitative Results

TartanAir Abandon Factory 1 https://mac-vo.github.io/rerun/TartanAir_AbandonFac_001.rrd

TartanAir v2 Dataset

    EuRoC Dataset

    KITTI Dataset

\ No newline at end of file
+MAC-VO: Metric-Aware Covariance for Learning-based Stereo Visual Odometry
    Light Mode

    MAC-VO: Metrics-Aware Covariance for Learning-based Stereo Visual Odometry

    Yuheng Qiu*, Yutian Chen*, Zihao Zhang, Wenshan Wang, Sebastian Scherer
    Carnegie Mellon University

    * Equal Contribution.

    Abstract

    We propose MAC-VO, a novel learning-based stereo VO that leverages the learned metrics-aware matching uncertainty for dual purposes: selecting keypoint and weighing the residual in pose graph optimization. Compared to traditional geometric methods prioritizing texture-affluent features like edges, our keypoint selector employs the learned uncertainty to filter out the low-quality features based on global inconsistency. In contrast to the learning-based algorithms that rely on the scale-agnostic weight matrix, we design a metrics-aware spatial covariance model to capture the spatial information during keypoint registration. Integrating this covariance model into pose graph optimization enhances the robustness and reliability of pose estimation, particularly in challenging environments with varying illumination, feature density, and motion patterns. On public benchmark datasets, MAC-VO outperforms existing VO algorithms, even some SLAM algorithms in challenging environments. The covariance-aware framework also provides valuable information about the reliability of the estimated poses, which can benefit decision-making for autonomous systems.

Supplementary Video

    Methods

    System Pipeline

    Figure 1. MAC-VO System pipeline. First, we use a shared matching network to estimate the depth, flow, and corresponding uncertainty. Secondly, we employ the learned uncertainty to filter out unreliable features. Lastly, we optimize the pose with the metrics-aware covariance model.

    Metrics-Aware Spatial Covariance

    Figure 2. a) Depth uncertainty estimated with the presence of matching uncertainty. b) Projecting depth and matching uncertainty on sensor plane to 3D space. c) Residual $\mathcal{L}_i$ for pose graph optimization.

    Qualitative Results

TartanAir Abandon Factory 1 https://mac-vo.github.io/rerun/TartanAir_AbandonFac_001.rrd

TartanAir v2 Dataset

    EuRoC Dataset

    KITTI Dataset

    \ No newline at end of file diff --git a/docs/index.txt b/docs/index.txt index 2b35460..cf9fee3 100644 --- a/docs/index.txt +++ b/docs/index.txt @@ -1,8 +1,8 @@ 2:I[486,[],"ClientPageRoot"] -3:I[7386,["531","static/chunks/3cde7dfc-e749fadf267fcb89.js","478","static/chunks/809ce7c2-c6b72ceb3175f9c8.js","876","static/chunks/876-ee3b31f42950c7b0.js","762","static/chunks/762-508864c46eae3c2e.js","931","static/chunks/app/page-25ecf2883ddac5c9.js"],"default"] +3:I[7386,["531","static/chunks/3cde7dfc-e749fadf267fcb89.js","478","static/chunks/809ce7c2-c6b72ceb3175f9c8.js","876","static/chunks/876-ee3b31f42950c7b0.js","762","static/chunks/762-508864c46eae3c2e.js","931","static/chunks/app/page-e72dbb63cd300d60.js"],"default"] 4:I[7071,[],""] 5:I[600,["648","static/chunks/ce84277d-c3f00ebc806278a6.js","876","static/chunks/876-ee3b31f42950c7b0.js","601","static/chunks/app/error-bbc408bcf8b7bae2.js"],"default"] 6:I[4008,[],""] -0:["sKG2otVmT-JSWW4yqfmjk",[[["",{"children":["__PAGE__",{}]},"$undefined","$undefined",true],["",{"children":["__PAGE__",{},[["$L1",["$","$L2",null,{"props":{"params":{},"searchParams":{}},"Component":"$3"}]],null],null]},[["$","html",null,{"children":["$","body",null,{"children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$5","errorStyles":[],"errorScripts":[],"template":["$","$L6",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":["$","main",null,{"children":["$","section",null,{"className":"bg-white","children":["$","div",null,{"className":"layout flex min-h-screen flex-col items-center justify-center text-center text-black","children":[["$","svg",null,{"stroke":"currentColor","fill":"currentColor","strokeWidth":"0","viewBox":"0 0 24 24","className":"drop-shadow-glow animate-flicker text-red-500","children":["$undefined",[["$","path","0",{"d":"M4.00098 20V14C4.00098 9.58172 7.5827 6 12.001 6C16.4193 6 20.001 9.58172 20.001 14V20H21.001V22H3.00098V20H4.00098ZM6.00098 14H8.00098C8.00098 11.7909 9.79184 10 12.001 10V8C8.68727 8 6.00098 10.6863 6.00098 14ZM11.001 2H13.001V5H11.001V2ZM19.7792 4.80761L21.1934 6.22183L19.0721 8.34315L17.6578 6.92893L19.7792 4.80761ZM2.80859 6.22183L4.22281 4.80761L6.34413 6.92893L4.92991 8.34315L2.80859 6.22183Z","children":"$undefined"}]]],"style":{"color":"$undefined"},"height":60,"width":60,"xmlns":"http://www.w3.org/2000/svg"}],["$","h1",null,{"className":"mt-8 text-4xl md:text-6xl","children":"Page Not Found"}],["$","a",null,{"href":"/","children":"Back to home"}]]}]}]}],"notFoundStyles":[],"styles":[["$","link","0",{"rel":"stylesheet","href":"/_next/static/css/f87fff2ab93d05a7.css","precedence":"next","crossOrigin":"$undefined"}]]}]}]}],null],null],[[["$","link","0",{"rel":"stylesheet","href":"/_next/static/css/2fda1dfdf3f14d7d.css","precedence":"next","crossOrigin":"$undefined"}]],"$L7"]]]] +0:["M8bGBKrGrPOhF9_vcPPeH",[[["",{"children":["__PAGE__",{}]},"$undefined","$undefined",true],["",{"children":["__PAGE__",{},[["$L1",["$","$L2",null,{"props":{"params":{},"searchParams":{}},"Component":"$3"}]],null],null]},[["$","html",null,{"children":["$","body",null,{"children":["$","$L4",null,{"parallelRouterKey":"children","segmentPath":["children"],"error":"$5","errorStyles":[],"errorScripts":[],"template":["$","$L6",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":["$","main",null,{"children":["$","section",null,{"className":"bg-white","children":["$","div",null,{"className":"layout flex min-h-screen flex-col items-center justify-center 
text-center text-black","children":[["$","svg",null,{"stroke":"currentColor","fill":"currentColor","strokeWidth":"0","viewBox":"0 0 24 24","className":"drop-shadow-glow animate-flicker text-red-500","children":["$undefined",[["$","path","0",{"d":"M4.00098 20V14C4.00098 9.58172 7.5827 6 12.001 6C16.4193 6 20.001 9.58172 20.001 14V20H21.001V22H3.00098V20H4.00098ZM6.00098 14H8.00098C8.00098 11.7909 9.79184 10 12.001 10V8C8.68727 8 6.00098 10.6863 6.00098 14ZM11.001 2H13.001V5H11.001V2ZM19.7792 4.80761L21.1934 6.22183L19.0721 8.34315L17.6578 6.92893L19.7792 4.80761ZM2.80859 6.22183L4.22281 4.80761L6.34413 6.92893L4.92991 8.34315L2.80859 6.22183Z","children":"$undefined"}]]],"style":{"color":"$undefined"},"height":60,"width":60,"xmlns":"http://www.w3.org/2000/svg"}],["$","h1",null,{"className":"mt-8 text-4xl md:text-6xl","children":"Page Not Found"}],["$","a",null,{"href":"/","children":"Back to home"}]]}]}]}],"notFoundStyles":[],"styles":[["$","link","0",{"rel":"stylesheet","href":"/_next/static/css/f87fff2ab93d05a7.css","precedence":"next","crossOrigin":"$undefined"}]]}]}]}],null],null],[[["$","link","0",{"rel":"stylesheet","href":"/_next/static/css/2fda1dfdf3f14d7d.css","precedence":"next","crossOrigin":"$undefined"}]],"$L7"]]]] 7:[["$","meta","0",{"name":"viewport","content":"width=device-width, initial-scale=1"}],["$","meta","1",{"charSet":"utf-8"}],["$","title","2",{"children":"MAC-VO: Metric-Aware Covariance for Learning-based Stereo Visual Odometry"}],["$","link","3",{"rel":"manifest","href":"/favicon/site.webmanifest","crossOrigin":"use-credentials"}],["$","meta","4",{"name":"robots","content":"index, follow"}],["$","meta","5",{"property":"og:title","content":"MAC-VO: Metric-Aware Covariance for Learning-based Stereo Visual Odometry"}],["$","meta","6",{"property":"og:url","content":"https://mac-vo.github.io"}],["$","meta","7",{"property":"og:site_name","content":"MAC-VO: Metric-Aware Covariance for Learning-based Stereo Visual Odometry"}],["$","meta","8",{"property":"og:locale","content":"en_US"}],["$","meta","9",{"property":"og:image","content":"https://mac-vo.github.io/images/og.jpg"}],["$","meta","10",{"property":"og:type","content":"website"}],["$","meta","11",{"name":"twitter:card","content":"summary_large_image"}],["$","meta","12",{"name":"twitter:title","content":"MAC-VO: Metric-Aware Covariance for Learning-based Stereo Visual Odometry"}],["$","meta","13",{"name":"twitter:image","content":"https://mac-vo.github.io/images/og.jpg"}],["$","link","14",{"rel":"shortcut icon","href":"/favicon/favicon-16x16.png"}],["$","link","15",{"rel":"icon","href":"/favicon/favicon.ico"}],["$","link","16",{"rel":"apple-touch-icon","href":"/favicon/apple-touch-icon.png"}]] 1:null diff --git a/src/app/page.tsx b/src/app/page.tsx index a9f75fd..6f1fa90 100644 --- a/src/app/page.tsx +++ b/src/app/page.tsx @@ -49,15 +49,15 @@ export default function HomePage() {
    - + GitHub Repo arXiv Page - + {/* Video - + */} Documentation @@ -97,6 +97,7 @@ export default function HomePage() {
    +

Supplementary Video