Calib3d.html 1.1 MB

430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301
573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582115822158231582415825158261582715828158291583015831158321583315834158351583615837158381583915840158411584215843158441584515846158471584815849158501585115852158531585415855158561585715858158591586015861158621586315864158651586615867158681586915870158711587215873158741587515876158771587815879158801588115882158831588415885158861588715888158891589015891158921589315894158951589615897158981589915900159011590215903159041590515906159071590815909159101591115912159131591415915159161591715918159191592015921159221592315924159251592615927159281592915930159311593215933159341593515936159371593815939159401594115942159431594415945159461594715948159491595015951159521595315954159551595615957159581595915960159611596215963159641596515966159671596815969159701597115972159731597415975159761597715978159791598015981159821598315984159851598615987159881598915990159911599215993159941599515996159971599815999160001600116002160031600416005160061600716008160091601016011160121601316014160151601616017160181601916020160211602216023160241602516026160271602816029160301603116032160331603416035160361603716038160391604016041160421604316044160451604616047160481604916050160511605216053160541605516056160571605816059160601606116062160631606416065160661606716068160691607016071160721607316074160751607616077160781607916080160811608216083160841608516086160871608816089160901609116092160931609416095160961609716098160991610016101161021610316104161051610616107161081610916110161111611216113161141611516116161171611816119161201612116122161231612416125161261612716128161291613016131161321613316134161351613616137161381613916140161411614216143161441614516146161471614816149161501615116152161531615416155161561615716158161591616016161161621616316164161651616616167161681616916170161711617216173161741617516176161771617816179161801618116182161831618416185161861618716188161891619016191161921619316194161951619616197161981619916200162011620216203162041620516206162071620816209162101621116212162131621416215162161621716218162191622016221162221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411
644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702117022170231702417025170261702717028170291703017031170321703317034170351703617037170381703917040170411704217043170441704517046170471704817049170501705117052170531705417055170561705717058170591706017061170621706317064170651706617067170681706917070170711707217073170741707517076170771707817079170801708117082170831708417085170861708717088170891709017091170921709317094170951709617097170981709917100171011710217103171041710517106171071710817109171101711117112171131711417115171161711717118171191712017121171221712317124171251712617127171281712917130171311713217133171341713517136171371713817139171401714117142171431714417145171461714717148171491715017151171521
715317154171551715617157171581715917160171611716217163171641716517166171671716817169171701717117172171731717417175171761717717178171791718017181171821718317184171851718617187171881718917190171911719217193171941719517196171971719817199172001720117202172031720417205172061720717208172091721017211172121721317214172151721617217172181721917220172211722217223172241722517226172271722817229172301723117232172331723417235172361723717238172391724017241172421724317244172451724617247172481724917250172511725217253172541725517256172571725817259172601726117262172631726417265172661726717268172691727017271172721727317274172751727617277172781727917280172811728217283172841728517286172871728817289172901729117292172931729417295172961729717298172991730017301173021730317304173051730617307173081730917310173111731217313173141731517316173171731817319173201732117322173231732417325173261732717328173291733017331173321733317334173351733617337173381733917340173411734217343173441734517346173471734817349173501735117352173531735417355173561735717358173591736017361173621736317364173651736617367173681736917370173711737217373173741737517376173771737817379173801738117382173831738417385173861738717388173891739017391173921739317394173951739617397173981739917400174011740217403174041740517406174071740817409174101741117412174131741417415174161741717418174191742017421174221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631
78641786517866178671786817869178701787117872178731787417875178761787717878178791788017881178821788317884178851788617887178881788917890178911789217893178941789517896178971789817899179001790117902179031790417905179061790717908179091791017911179121791317914179151791617917179181791917920179211792217923179241792517926179271792817929179301793117932179331793417935179361793717938179391794017941179421794317944179451794617947179481794917950179511795217953179541795517956179571795817959179601796117962179631796417965179661796717968179691797017971179721797317974179751797617977179781797917980179811798217983179841798517986179871798817989179901799117992179931799417995179961799717998179991800018001180021800318004180051800618007180081800918010180111801218013180141801518016180171801818019180201802118022180231802418025180261802718028180291803018031180321803318034180351803618037180381803918040180411804218043180441804518046180471804818049180501805118052180531805418055180561805718058180591806018061180621806318064180651806618067180681806918070180711807218073180741807518076180771807818079180801808118082180831808418085180861808718088180891809018091180921809318094180951809618097180981809918100181011810218103181041810518106181071810818109181101811118112181131811418115181161811718118181191812018121181221812318124181251812618127181281812918130181311813218133181341813518136181371813818139181401814118142181431814418145181461814718148181491815018151181521815318154181551815618157181581815918160181611816218163181641816518166181671816818169181701817118172181731817418175181761817718178181791818018181181821818318184181851818618187181881818918190181911819218193181941819518196181971819818199182001820118202182031820418205182061820718208182091821018211182121821318214182151821618217182181821918220182211822218223182241822518226182271822818229182301823118232182331823418235182361823718238182391824018241182421824318244182451824618247182481824918250182511825218253182541825518256182571825818259182601826118262182631826418265182661826718268182691827018271182721827318274182751827618277182781827918280182811828218283182841828518286182871828818289182901829118292182931829418295182961829718298182991830018301183021830318304183051830618307183081830918310183111831218313183141831518316183171831818319183201832118322183231832418325183261832718328183291833018331183321833318334183351833618337183381833918340183411834218343183441834518346183471834818349183501835118352183531835418355183561835718358183591836018361183621836318364183651836618367183681836918370183711837218373183741837518376183771837818379183801838118382183831838418385183861838718388183891839018391183921839318394183951839618397183981839918400184011840218403184041840518406184071840818409184101841118412184131841418415184161841718418184191842018421184221842318424184251842618427184281842918430184311843218433184341843518436184371843818439184401844118442184431844418445184461844718448184491845018451184521845318454184551845618457184581845918460184611846218463184641846518466184671846818469184701847118472184731847418475184761847718478184791848018481184821848318484184851848618487184881848918490184911849218493184941849518496184971849818499185001850118502185031850418505185061850718508185091851018511185121851318514185151851618517185181851918520185211852218523185241852518526185271852818529185301853118532185331853418535185361853718538185391854018541185421854318544185451854618547185481854918550185511855218553
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_312) on Wed Jun 28 12:47:25 UTC 2023 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Calib3d (OpenCV 4.8.0 Java documentation)</title>
<meta name="date" content="2023-06-28">
<link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Calib3d (OpenCV 4.8.0 Java documentation)";
}
}
catch(err) {
}
//-->
var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9,"i34":9,"i35":9,"i36":9,"i37":9,"i38":9,"i39":9,"i40":9,"i41":9,"i42":9,"i43":9,"i44":9,"i45":9,"i46":9,"i47":9,"i48":9,"i49":9,"i50":9,"i51":9,"i52":9,"i53":9,"i54":9,"i55":9,"i56":9,"i57":9,"i58":9,"i59":9,"i60":9,"i61":9,"i62":9,"i63":9,"i64":9,"i65":9,"i66":9,"i67":9,"i68":9,"i69":9,"i70":9,"i71":9,"i72":9,"i73":9,"i74":9,"i75":9,"i76":9,"i77":9,"i78":9,"i79":9,"i80":9,"i81":9,"i82":9,"i83":9,"i84":9,"i85":9,"i86":9,"i87":9,"i88":9,"i89":9,"i90":9,"i91":9,"i92":9,"i93":9,"i94":9,"i95":9,"i96":9,"i97":9,"i98":9,"i99":9,"i100":9,"i101":9,"i102":9,"i103":9,"i104":9,"i105":9,"i106":9,"i107":9,"i108":9,"i109":9,"i110":9,"i111":9,"i112":9,"i113":9,"i114":9,"i115":9,"i116":9,"i117":9,"i118":9,"i119":9,"i120":9,"i121":9,"i122":9,"i123":9,"i124":9,"i125":9,"i126":9,"i127":9,"i128":9,"i129":9,"i130":9,"i131":9,"i132":9,"i133":9,"i134":9,"i135":9,"i136":9,"i137":9,"i138":9,"i139":9,"i140":9,"i141":9,"i142":9,"i143":9,"i144":9,"i145":9,"i146":9,"i147":9,"i148":9,"i149":9,"i150":9,"i151":9,"i152":9,"i153":9,"i154":9,"i155":9,"i156":9,"i157":9,"i158":9,"i159":9,"i160":9,"i161":9,"i162":9,"i163":9,"i164":9,"i165":9,"i166":9,"i167":9,"i168":9,"i169":9,"i170":9,"i171":9,"i172":9,"i173":9,"i174":9,"i175":9,"i176":9,"i177":9,"i178":9,"i179":9,"i180":9,"i181":9,"i182":9,"i183":9,"i184":9,"i185":9,"i186":9,"i187":9,"i188":9,"i189":9,"i190":9,"i191":9,"i192":9,"i193":9,"i194":9,"i195":9,"i196":9,"i197":9,"i198":9,"i199":9,"i200":9,"i201":9,"i202":9,"i203":9,"i204":9,"i205":9,"i206":9,"i207":9,"i208":9,"i209":9,"i210":9,"i211":9,"i212":9,"i213":9,"i214":9,"i215":9,"i216":9,"i217":9,"i218":9,"i219":9,"i220":9,"i221":9,"i222":9,"i223":9,"i224":9,"i225":9,"i226":9,"i227":9,"i228":9,"i229":9,"i230":9,"i231":9,"i232":9,"i233":9,"i234":9,"i235":9,"i236":9,"i237":9,"i238":9};
var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]};
var altColor = "altColor";
var rowColor = "rowColor";
var tableTab = "tableTab";
var activeTableTab = "activeTableTab";
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../index-all.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">
<script>
// Resolve the site-local MathJax configuration (mymath.js) relative to the /javadoc/ root,
// falling back to the site origin when the page is served from elsewhere.
var url = window.location.href;
var pos = url.lastIndexOf('/javadoc/');
url = pos >= 0 ? (url.substring(0, pos) + '/javadoc/mymath.js') : (window.location.origin + '/mymath.js');
// Load MathJax 2.7.0 from the CDN with the TeX-AMS-MML_HTMLorMML preset plus the local config.
var script = document.createElement('script');
script.src = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.0/MathJax.js?config=TeX-AMS-MML_HTMLorMML,' + url;
document.getElementsByTagName('head')[0].appendChild(script);
</script>
</div>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev&nbsp;Class</li>
<li><a href="../../../org/opencv/calib3d/StereoBM.html" title="class in org.opencv.calib3d"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?org/opencv/calib3d/Calib3d.html" target="_top">Frames</a></li>
<li><a href="Calib3d.html" target="_top">No&nbsp;Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary:&nbsp;</li>
<li>Nested&nbsp;|&nbsp;</li>
<li><a href="#field.summary">Field</a>&nbsp;|&nbsp;</li>
<li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail:&nbsp;</li>
<li><a href="#field.detail">Field</a>&nbsp;|&nbsp;</li>
<li><a href="#constructor.detail">Constr</a>&nbsp;|&nbsp;</li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">org.opencv.calib3d</div>
<h2 title="Class Calib3d" class="title">Class Calib3d</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li>java.lang.Object</li>
<li>
<ul class="inheritance">
<li>org.opencv.calib3d.Calib3d</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<hr>
<br>
<pre>public class <span class="typeNameLabel">Calib3d</span>
extends java.lang.Object</pre>
</li>
</ul>
</div>
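<!-- Editorial usage sketch (not generated by javadoc): a minimal, hypothetical example of how the
     static int flag constants listed in the Field Summary below are combined with bitwise OR and
     passed to this class's static methods. The file name, pattern size, and demo class name are
     placeholders; the sketch assumes the OpenCV 4.x Java bindings are on the classpath and that
     the native library has already been loaded, e.g. via System.loadLibrary(Core.NATIVE_LIBRARY_NAME). -->
<div class="block">Usage sketch (editorial addition), assuming the OpenCV Java bindings and a previously loaded native library:</div>
<pre>
// Hypothetical example: chessboard detection using flag constants from Calib3d.
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;

public class ChessboardDemo {
    public static void main(String[] args) {
        // "board.png" is a placeholder path to a calibration-pattern photo.
        Mat image = Imgcodecs.imread("board.png", Imgcodecs.IMREAD_GRAYSCALE);
        Size patternSize = new Size(9, 6);          // inner corners per row and column
        MatOfPoint2f corners = new MatOfPoint2f();  // filled with detected corners on success

        // Flags such as CALIB_CB_ADAPTIVE_THRESH and CALIB_CB_NORMALIZE_IMAGE
        // (see the Field Summary) are combined with bitwise OR.
        boolean found = Calib3d.findChessboardCorners(
                image, patternSize, corners,
                Calib3d.CALIB_CB_ADAPTIVE_THRESH | Calib3d.CALIB_CB_NORMALIZE_IMAGE);

        System.out.println("Pattern found: " + found);
    }
}
</pre>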
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- =========== FIELD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="field.summary">
<!-- -->
</a>
<h3>Field Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Field Summary table, listing fields, and an explanation">
<caption><span>Fields</span><span class="tabEnd">&nbsp;</span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Field and Description</th>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_CB_ACCURACY">CALIB_CB_ACCURACY</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_CB_ADAPTIVE_THRESH">CALIB_CB_ADAPTIVE_THRESH</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_CB_ASYMMETRIC_GRID">CALIB_CB_ASYMMETRIC_GRID</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_CB_CLUSTERING">CALIB_CB_CLUSTERING</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_CB_EXHAUSTIVE">CALIB_CB_EXHAUSTIVE</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_CB_FAST_CHECK">CALIB_CB_FAST_CHECK</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_CB_FILTER_QUADS">CALIB_CB_FILTER_QUADS</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_CB_LARGER">CALIB_CB_LARGER</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_CB_MARKER">CALIB_CB_MARKER</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_CB_NORMALIZE_IMAGE">CALIB_CB_NORMALIZE_IMAGE</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_CB_SYMMETRIC_GRID">CALIB_CB_SYMMETRIC_GRID</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_FIX_ASPECT_RATIO">CALIB_FIX_ASPECT_RATIO</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_FIX_FOCAL_LENGTH">CALIB_FIX_FOCAL_LENGTH</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_FIX_INTRINSIC">CALIB_FIX_INTRINSIC</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_FIX_K1">CALIB_FIX_K1</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_FIX_K2">CALIB_FIX_K2</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_FIX_K3">CALIB_FIX_K3</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_FIX_K4">CALIB_FIX_K4</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_FIX_K5">CALIB_FIX_K5</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_FIX_K6">CALIB_FIX_K6</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_FIX_PRINCIPAL_POINT">CALIB_FIX_PRINCIPAL_POINT</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_FIX_S1_S2_S3_S4">CALIB_FIX_S1_S2_S3_S4</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_FIX_TANGENT_DIST">CALIB_FIX_TANGENT_DIST</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_FIX_TAUX_TAUY">CALIB_FIX_TAUX_TAUY</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_HAND_EYE_ANDREFF">CALIB_HAND_EYE_ANDREFF</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_HAND_EYE_DANIILIDIS">CALIB_HAND_EYE_DANIILIDIS</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_HAND_EYE_HORAUD">CALIB_HAND_EYE_HORAUD</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_HAND_EYE_PARK">CALIB_HAND_EYE_PARK</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_HAND_EYE_TSAI">CALIB_HAND_EYE_TSAI</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_NINTRINSIC">CALIB_NINTRINSIC</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_RATIONAL_MODEL">CALIB_RATIONAL_MODEL</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_ROBOT_WORLD_HAND_EYE_LI">CALIB_ROBOT_WORLD_HAND_EYE_LI</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_ROBOT_WORLD_HAND_EYE_SHAH">CALIB_ROBOT_WORLD_HAND_EYE_SHAH</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_SAME_FOCAL_LENGTH">CALIB_SAME_FOCAL_LENGTH</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_THIN_PRISM_MODEL">CALIB_THIN_PRISM_MODEL</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_TILTED_MODEL">CALIB_TILTED_MODEL</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_USE_EXTRINSIC_GUESS">CALIB_USE_EXTRINSIC_GUESS</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_USE_INTRINSIC_GUESS">CALIB_USE_INTRINSIC_GUESS</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_USE_LU">CALIB_USE_LU</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_USE_QR">CALIB_USE_QR</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_ZERO_DISPARITY">CALIB_ZERO_DISPARITY</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CALIB_ZERO_TANGENT_DIST">CALIB_ZERO_TANGENT_DIST</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CirclesGridFinderParameters_ASYMMETRIC_GRID">CirclesGridFinderParameters_ASYMMETRIC_GRID</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CirclesGridFinderParameters_SYMMETRIC_GRID">CirclesGridFinderParameters_SYMMETRIC_GRID</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#COV_POLISHER">COV_POLISHER</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CV_DLS">CV_DLS</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CV_EPNP">CV_EPNP</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CV_ITERATIVE">CV_ITERATIVE</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CV_P3P">CV_P3P</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CvLevMarq_CALC_J">CvLevMarq_CALC_J</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CvLevMarq_CHECK_ERR">CvLevMarq_CHECK_ERR</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CvLevMarq_DONE">CvLevMarq_DONE</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#CvLevMarq_STARTED">CvLevMarq_STARTED</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_CALIB_CHECK_COND">fisheye_CALIB_CHECK_COND</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_CALIB_FIX_FOCAL_LENGTH">fisheye_CALIB_FIX_FOCAL_LENGTH</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_CALIB_FIX_INTRINSIC">fisheye_CALIB_FIX_INTRINSIC</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_CALIB_FIX_K1">fisheye_CALIB_FIX_K1</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_CALIB_FIX_K2">fisheye_CALIB_FIX_K2</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_CALIB_FIX_K3">fisheye_CALIB_FIX_K3</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_CALIB_FIX_K4">fisheye_CALIB_FIX_K4</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_CALIB_FIX_PRINCIPAL_POINT">fisheye_CALIB_FIX_PRINCIPAL_POINT</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_CALIB_FIX_SKEW">fisheye_CALIB_FIX_SKEW</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_CALIB_RECOMPUTE_EXTRINSIC">fisheye_CALIB_RECOMPUTE_EXTRINSIC</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_CALIB_USE_INTRINSIC_GUESS">fisheye_CALIB_USE_INTRINSIC_GUESS</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_CALIB_ZERO_DISPARITY">fisheye_CALIB_ZERO_DISPARITY</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#FM_7POINT">FM_7POINT</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#FM_8POINT">FM_8POINT</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#FM_LMEDS">FM_LMEDS</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#FM_RANSAC">FM_RANSAC</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#LMEDS">LMEDS</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#LOCAL_OPTIM_GC">LOCAL_OPTIM_GC</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#LOCAL_OPTIM_INNER_AND_ITER_LO">LOCAL_OPTIM_INNER_AND_ITER_LO</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#LOCAL_OPTIM_INNER_LO">LOCAL_OPTIM_INNER_LO</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#LOCAL_OPTIM_NULL">LOCAL_OPTIM_NULL</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#LOCAL_OPTIM_SIGMA">LOCAL_OPTIM_SIGMA</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#LSQ_POLISHER">LSQ_POLISHER</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#MAGSAC">MAGSAC</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#NEIGH_FLANN_KNN">NEIGH_FLANN_KNN</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#NEIGH_FLANN_RADIUS">NEIGH_FLANN_RADIUS</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#NEIGH_GRID">NEIGH_GRID</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#NONE_POLISHER">NONE_POLISHER</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#PROJ_SPHERICAL_EQRECT">PROJ_SPHERICAL_EQRECT</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#PROJ_SPHERICAL_ORTHO">PROJ_SPHERICAL_ORTHO</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#RANSAC">RANSAC</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#RHO">RHO</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SAMPLING_NAPSAC">SAMPLING_NAPSAC</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SAMPLING_PROGRESSIVE_NAPSAC">SAMPLING_PROGRESSIVE_NAPSAC</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SAMPLING_PROSAC">SAMPLING_PROSAC</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SAMPLING_UNIFORM">SAMPLING_UNIFORM</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SCORE_METHOD_LMEDS">SCORE_METHOD_LMEDS</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SCORE_METHOD_MAGSAC">SCORE_METHOD_MAGSAC</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SCORE_METHOD_MSAC">SCORE_METHOD_MSAC</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SCORE_METHOD_RANSAC">SCORE_METHOD_RANSAC</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SOLVEPNP_AP3P">SOLVEPNP_AP3P</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SOLVEPNP_DLS">SOLVEPNP_DLS</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SOLVEPNP_EPNP">SOLVEPNP_EPNP</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SOLVEPNP_IPPE">SOLVEPNP_IPPE</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SOLVEPNP_IPPE_SQUARE">SOLVEPNP_IPPE_SQUARE</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SOLVEPNP_ITERATIVE">SOLVEPNP_ITERATIVE</a></span></code>&nbsp;</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SOLVEPNP_MAX_COUNT">SOLVEPNP_MAX_COUNT</a></span></code>&nbsp;</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SOLVEPNP_P3P">SOLVEPNP_P3P</a></span></code>&nbsp;</td>
  544. </tr>
  545. <tr class="rowColor">
  546. <td class="colFirst"><code>static int</code></td>
  547. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SOLVEPNP_SQPNP">SOLVEPNP_SQPNP</a></span></code>&nbsp;</td>
  548. </tr>
  549. <tr class="altColor">
  550. <td class="colFirst"><code>static int</code></td>
  551. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#SOLVEPNP_UPNP">SOLVEPNP_UPNP</a></span></code>&nbsp;</td>
  552. </tr>
  553. <tr class="rowColor">
  554. <td class="colFirst"><code>static int</code></td>
  555. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#USAC_ACCURATE">USAC_ACCURATE</a></span></code>&nbsp;</td>
  556. </tr>
  557. <tr class="altColor">
  558. <td class="colFirst"><code>static int</code></td>
  559. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#USAC_DEFAULT">USAC_DEFAULT</a></span></code>&nbsp;</td>
  560. </tr>
  561. <tr class="rowColor">
  562. <td class="colFirst"><code>static int</code></td>
  563. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#USAC_FAST">USAC_FAST</a></span></code>&nbsp;</td>
  564. </tr>
  565. <tr class="altColor">
  566. <td class="colFirst"><code>static int</code></td>
  567. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#USAC_FM_8PTS">USAC_FM_8PTS</a></span></code>&nbsp;</td>
  568. </tr>
  569. <tr class="rowColor">
  570. <td class="colFirst"><code>static int</code></td>
  571. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#USAC_MAGSAC">USAC_MAGSAC</a></span></code>&nbsp;</td>
  572. </tr>
  573. <tr class="altColor">
  574. <td class="colFirst"><code>static int</code></td>
  575. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#USAC_PARALLEL">USAC_PARALLEL</a></span></code>&nbsp;</td>
  576. </tr>
  577. <tr class="rowColor">
  578. <td class="colFirst"><code>static int</code></td>
  579. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#USAC_PROSAC">USAC_PROSAC</a></span></code>&nbsp;</td>
  580. </tr>
  581. </table>
  582. </li>
  583. </ul>
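<div class="block">The fields above are plain <code>static int</code> selector values: the RANSAC/RHO/USAC_* identifiers are typically passed as an <code>int method</code> argument of the robust estimators, the SOLVEPNP_* identifiers as the <code>int flags</code> argument of the solvePnP family, and the SAMPLING_*, SCORE_METHOD_*, LOCAL_OPTIM_*, NEIGH_* and *_POLISHER values tune the USAC framework. A minimal sketch, assuming only that the OpenCV Java bindings are on the classpath (nothing here is part of the generated API; only constant values are read, so no native call is made):
<pre><code>
import org.opencv.calib3d.Calib3d;

public class FlagConstantsSketch {
    public static void main(String[] args) {
        // Robust-estimation method id, e.g. for the int method parameter of the
        // homography/fundamental-matrix estimators listed further down this page.
        int method = Calib3d.USAC_MAGSAC;           // alternatives: RANSAC, RHO, USAC_DEFAULT, ...
        // PnP solver selector, e.g. for the int flags parameter of the solvePnP family.
        int pnpFlag = Calib3d.SOLVEPNP_IPPE_SQUARE; // alternatives: SOLVEPNP_ITERATIVE, SOLVEPNP_EPNP, ...
        System.out.println("method=" + method + ", pnpFlag=" + pnpFlag);
    }
}
</code></pre>
</div>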
  584. <!-- ======== CONSTRUCTOR SUMMARY ======== -->
  585. <ul class="blockList">
  586. <li class="blockList"><a name="constructor.summary">
  587. <!-- -->
  588. </a>
  589. <h3>Constructor Summary</h3>
  590. <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
  591. <caption><span>Constructors</span><span class="tabEnd">&nbsp;</span></caption>
  592. <tr>
  593. <th class="colOne" scope="col">Constructor and Description</th>
  594. </tr>
  595. <tr class="altColor">
  596. <td class="colOne"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#Calib3d--">Calib3d</a></span>()</code>&nbsp;</td>
  597. </tr>
  598. </table>
  599. </li>
  600. </ul>
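<div class="block">The class exposes a single no-argument constructor, but every method in the summary that follows is declared <code>static</code>, so it is normally used without creating an instance. A minimal sketch, assuming the OpenCV Java bindings are on the classpath and the native library is available on <code>java.library.path</code>; it calls <code>composeRT</code>, one of the methods summarized below:
<pre><code>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class ComposeRTSketch {
    public static void main(String[] args) {
        // Load the OpenCV native library before calling any native-backed method.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Two identity motions: zero Rodrigues rotation vectors and zero translations.
        Mat rvec1 = Mat.zeros(3, 1, CvType.CV_64F);
        Mat tvec1 = Mat.zeros(3, 1, CvType.CV_64F);
        Mat rvec2 = Mat.zeros(3, 1, CvType.CV_64F);
        Mat tvec2 = Mat.zeros(3, 1, CvType.CV_64F);
        Mat rvec3 = new Mat();
        Mat tvec3 = new Mat();

        // No Calib3d instance is needed; the combined transformation is written
        // into the output Mats rvec3 and tvec3.
        Calib3d.composeRT(rvec1, tvec1, rvec2, tvec2, rvec3, tvec3);
        System.out.println("composed rvec: " + rvec3.dump());
        System.out.println("composed tvec: " + tvec3.dump());
    }
}
</code></pre>
</div>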
  601. <!-- ========== METHOD SUMMARY =========== -->
  602. <ul class="blockList">
  603. <li class="blockList"><a name="method.summary">
  604. <!-- -->
  605. </a>
  606. <h3>Method Summary</h3>
  607. <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
  608. <caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t1" class="tableTab"><span><a href="javascript:show(1);">Static Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption>
  609. <tr>
  610. <th class="colFirst" scope="col">Modifier and Type</th>
  611. <th class="colLast" scope="col">Method and Description</th>
  612. </tr>
  613. <tr id="i0" class="altColor">
  614. <td class="colFirst"><code>static double</code></td>
  615. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateCamera-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-">calibrateCamera</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  616. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  617. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  618. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  619. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  620. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  621. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs)</code>&nbsp;</td>
  622. </tr>
  623. <tr id="i1" class="rowColor">
  624. <td class="colFirst"><code>static double</code></td>
  625. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateCamera-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-int-">calibrateCamera</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  626. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  627. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  628. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  629. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  630. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  631. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  632. int&nbsp;flags)</code>&nbsp;</td>
  633. </tr>
  634. <tr id="i2" class="altColor">
  635. <td class="colFirst"><code>static double</code></td>
  636. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateCamera-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-int-org.opencv.core.TermCriteria-">calibrateCamera</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  637. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  638. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  639. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  640. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  641. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  642. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  643. int&nbsp;flags,
  644. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</code>&nbsp;</td>
  645. </tr>
  646. <tr id="i3" class="rowColor">
  647. <td class="colFirst"><code>static double</code></td>
  648. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateCameraExtended-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">calibrateCameraExtended</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  649. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  650. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  651. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  652. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  653. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  654. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  655. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsIntrinsics,
  656. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsExtrinsics,
  657. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors)</code>
  658. <div class="block">Finds the camera intrinsic and extrinsic parameters from several views of a calibration
  659. pattern.</div>
  660. </td>
  661. </tr>
  662. <tr id="i4" class="altColor">
  663. <td class="colFirst"><code>static double</code></td>
  664. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateCameraExtended-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">calibrateCameraExtended</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  665. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  666. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  667. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  668. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  669. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  670. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  671. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsIntrinsics,
  672. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsExtrinsics,
  673. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
  674. int&nbsp;flags)</code>
  675. <div class="block">Finds the camera intrinsic and extrinsic parameters from several views of a calibration
  676. pattern.</div>
  677. </td>
  678. </tr>
  679. <tr id="i5" class="rowColor">
  680. <td class="colFirst"><code>static double</code></td>
  681. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateCameraExtended-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.TermCriteria-">calibrateCameraExtended</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  682. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  683. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  684. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  685. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  686. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  687. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  688. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsIntrinsics,
  689. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsExtrinsics,
  690. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
  691. int&nbsp;flags,
  692. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</code>
  693. <div class="block">Finds the camera intrinsic and extrinsic parameters from several views of a calibration
  694. pattern.</div>
  695. </td>
  696. </tr>
  697. <tr id="i6" class="altColor">
  698. <td class="colFirst"><code>static double</code></td>
  699. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateCameraRO-java.util.List-java.util.List-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-">calibrateCameraRO</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  700. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  701. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  702. int&nbsp;iFixedPoint,
  703. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  704. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  705. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  706. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  707. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newObjPoints)</code>&nbsp;</td>
  708. </tr>
  709. <tr id="i7" class="rowColor">
  710. <td class="colFirst"><code>static double</code></td>
  711. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateCameraRO-java.util.List-java.util.List-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-int-">calibrateCameraRO</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  712. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  713. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  714. int&nbsp;iFixedPoint,
  715. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  716. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  717. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  718. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  719. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newObjPoints,
  720. int&nbsp;flags)</code>&nbsp;</td>
  721. </tr>
  722. <tr id="i8" class="altColor">
  723. <td class="colFirst"><code>static double</code></td>
  724. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateCameraRO-java.util.List-java.util.List-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-int-org.opencv.core.TermCriteria-">calibrateCameraRO</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  725. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  726. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  727. int&nbsp;iFixedPoint,
  728. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  729. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  730. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  731. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  732. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newObjPoints,
  733. int&nbsp;flags,
  734. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</code>&nbsp;</td>
  735. </tr>
  736. <tr id="i9" class="rowColor">
  737. <td class="colFirst"><code>static double</code></td>
  738. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateCameraROExtended-java.util.List-java.util.List-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">calibrateCameraROExtended</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  739. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  740. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  741. int&nbsp;iFixedPoint,
  742. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  743. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  744. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  745. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  746. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newObjPoints,
  747. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsIntrinsics,
  748. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsExtrinsics,
  749. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsObjPoints,
  750. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors)</code>
  751. <div class="block">Finds the camera intrinsic and extrinsic parameters from several views of a calibration pattern.</div>
  752. </td>
  753. </tr>
  754. <tr id="i10" class="altColor">
  755. <td class="colFirst"><code>static double</code></td>
  756. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateCameraROExtended-java.util.List-java.util.List-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">calibrateCameraROExtended</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  757. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  758. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  759. int&nbsp;iFixedPoint,
  760. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  761. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  762. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  763. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  764. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newObjPoints,
  765. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsIntrinsics,
  766. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsExtrinsics,
  767. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsObjPoints,
  768. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
  769. int&nbsp;flags)</code>
  770. <div class="block">Finds the camera intrinsic and extrinsic parameters from several views of a calibration pattern.</div>
  771. </td>
  772. </tr>
  773. <tr id="i11" class="rowColor">
  774. <td class="colFirst"><code>static double</code></td>
  775. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateCameraROExtended-java.util.List-java.util.List-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.TermCriteria-">calibrateCameraROExtended</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  776. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  777. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  778. int&nbsp;iFixedPoint,
  779. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  780. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  781. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  782. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  783. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newObjPoints,
  784. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsIntrinsics,
  785. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsExtrinsics,
  786. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsObjPoints,
  787. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
  788. int&nbsp;flags,
  789. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</code>
  790. <div class="block">Finds the camera intrinsic and extrinsic parameters from several views of a calibration pattern.</div>
  791. </td>
  792. </tr>
  793. <tr id="i12" class="altColor">
  794. <td class="colFirst"><code>static void</code></td>
  795. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateHandEye-java.util.List-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-">calibrateHandEye</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_gripper2base,
  796. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_gripper2base,
  797. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_target2cam,
  798. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_target2cam,
  799. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R_cam2gripper,
  800. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t_cam2gripper)</code>
  801. <div class="block">Computes Hand-Eye calibration: \(_{}^{g}\textrm{T}_c\)</div>
  802. </td>
  803. </tr>
  804. <tr id="i13" class="rowColor">
  805. <td class="colFirst"><code>static void</code></td>
  806. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateHandEye-java.util.List-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-int-">calibrateHandEye</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_gripper2base,
  807. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_gripper2base,
  808. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_target2cam,
  809. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_target2cam,
  810. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R_cam2gripper,
  811. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t_cam2gripper,
  812. int&nbsp;method)</code>
  813. <div class="block">Computes Hand-Eye calibration: \(_{}^{g}\textrm{T}_c\)</div>
  814. </td>
  815. </tr>
  816. <tr id="i14" class="altColor">
  817. <td class="colFirst"><code>static void</code></td>
  818. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateRobotWorldHandEye-java.util.List-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">calibrateRobotWorldHandEye</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_world2cam,
  819. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_world2cam,
  820. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_base2gripper,
  821. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_base2gripper,
  822. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R_base2world,
  823. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t_base2world,
  824. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R_gripper2cam,
  825. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t_gripper2cam)</code>
  826. <div class="block">Computes Robot-World/Hand-Eye calibration: \(_{}^{w}\textrm{T}_b\) and \(_{}^{c}\textrm{T}_g\)</div>
  827. </td>
  828. </tr>
  829. <tr id="i15" class="rowColor">
  830. <td class="colFirst"><code>static void</code></td>
  831. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrateRobotWorldHandEye-java.util.List-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">calibrateRobotWorldHandEye</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_world2cam,
  832. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_world2cam,
  833. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_base2gripper,
  834. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_base2gripper,
  835. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R_base2world,
  836. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t_base2world,
  837. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R_gripper2cam,
  838. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t_gripper2cam,
  839. int&nbsp;method)</code>
  840. <div class="block">Computes Robot-World/Hand-Eye calibration: \(_{}^{w}\textrm{T}_b\) and \(_{}^{c}\textrm{T}_g\)</div>
  841. </td>
  842. </tr>
  843. <tr id="i16" class="altColor">
  844. <td class="colFirst"><code>static void</code></td>
  845. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#calibrationMatrixValues-org.opencv.core.Mat-org.opencv.core.Size-double-double-double:A-double:A-double:A-org.opencv.core.Point-double:A-">calibrationMatrixValues</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  846. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  847. double&nbsp;apertureWidth,
  848. double&nbsp;apertureHeight,
  849. double[]&nbsp;fovx,
  850. double[]&nbsp;fovy,
  851. double[]&nbsp;focalLength,
  852. <a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;principalPoint,
  853. double[]&nbsp;aspectRatio)</code>
  854. <div class="block">Computes useful camera characteristics from the camera intrinsic matrix.</div>
  855. </td>
  856. </tr>
  857. <tr id="i17" class="rowColor">
  858. <td class="colFirst"><code>static boolean</code></td>
  859. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#checkChessboard-org.opencv.core.Mat-org.opencv.core.Size-">checkChessboard</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;img,
  860. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;size)</code>&nbsp;</td>
  861. </tr>
  862. <tr id="i18" class="altColor">
  863. <td class="colFirst"><code>static void</code></td>
  864. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">composeRT</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  865. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  866. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  867. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  868. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  869. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3)</code>
  870. <div class="block">Combines two rotation-and-shift transformations.</div>
  871. </td>
  872. </tr>
  873. <tr id="i19" class="rowColor">
  874. <td class="colFirst"><code>static void</code></td>
  875. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">composeRT</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  876. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  877. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  878. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  879. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  880. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  881. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1)</code>
  882. <div class="block">Combines two rotation-and-shift transformations.</div>
  883. </td>
  884. </tr>
  885. <tr id="i20" class="altColor">
  886. <td class="colFirst"><code>static void</code></td>
  887. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">composeRT</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  888. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  889. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  890. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  891. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  892. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  893. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1,
  894. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt1)</code>
  895. <div class="block">Combines two rotation-and-shift transformations.</div>
  896. </td>
  897. </tr>
  898. <tr id="i21" class="rowColor">
  899. <td class="colFirst"><code>static void</code></td>
  900. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">composeRT</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  901. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  902. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  903. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  904. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  905. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  906. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1,
  907. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt1,
  908. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr2)</code>
  909. <div class="block">Combines two rotation-and-shift transformations.</div>
  910. </td>
  911. </tr>
  912. <tr id="i22" class="altColor">
  913. <td class="colFirst"><code>static void</code></td>
  914. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">composeRT</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  915. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  916. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  917. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  918. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  919. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  920. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1,
  921. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt1,
  922. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr2,
  923. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt2)</code>
  924. <div class="block">Combines two rotation-and-shift transformations.</div>
  925. </td>
  926. </tr>
  927. <tr id="i23" class="rowColor">
  928. <td class="colFirst"><code>static void</code></td>
  929. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">composeRT</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  930. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  931. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  932. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  933. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  934. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  935. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1,
  936. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt1,
  937. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr2,
  938. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt2,
  939. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dr1)</code>
  940. <div class="block">Combines two rotation-and-shift transformations.</div>
  941. </td>
  942. </tr>
  943. <tr id="i24" class="altColor">
  944. <td class="colFirst"><code>static void</code></td>
  945. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">composeRT</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  946. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  947. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  948. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  949. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  950. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  951. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1,
  952. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt1,
  953. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr2,
  954. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt2,
  955. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dr1,
  956. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dt1)</code>
  957. <div class="block">Combines two rotation-and-shift transformations.</div>
  958. </td>
  959. </tr>
  960. <tr id="i25" class="rowColor">
  961. <td class="colFirst"><code>static void</code></td>
  962. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">composeRT</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  963. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  964. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  965. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  966. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  967. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  968. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1,
  969. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt1,
  970. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr2,
  971. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt2,
  972. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dr1,
  973. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dt1,
  974. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dr2)</code>
  975. <div class="block">Combines two rotation-and-shift transformations.</div>
  976. </td>
  977. </tr>
  978. <tr id="i26" class="altColor">
  979. <td class="colFirst"><code>static void</code></td>
  980. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">composeRT</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  981. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  982. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  983. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  984. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  985. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  986. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1,
  987. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt1,
  988. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr2,
  989. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt2,
  990. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dr1,
  991. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dt1,
  992. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dr2,
  993. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dt2)</code>
  994. <div class="block">Combines two rotation-and-shift transformations.</div>
  995. </td>
  996. </tr>
  997. <tr id="i27" class="rowColor">
  998. <td class="colFirst"><code>static void</code></td>
  999. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#computeCorrespondEpilines-org.opencv.core.Mat-int-org.opencv.core.Mat-org.opencv.core.Mat-">computeCorrespondEpilines</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points,
  1000. int&nbsp;whichImage,
  1001. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  1002. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;lines)</code>
  1003. <div class="block">For points in an image of a stereo pair, computes the corresponding epilines in the other image.</div>
  1004. </td>
  1005. </tr>
  1006. <tr id="i28" class="altColor">
  1007. <td class="colFirst"><code>static void</code></td>
  1008. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#convertPointsFromHomogeneous-org.opencv.core.Mat-org.opencv.core.Mat-">convertPointsFromHomogeneous</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  1009. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst)</code>
  1010. <div class="block">Converts points from homogeneous to Euclidean space.</div>
  1011. </td>
  1012. </tr>
  1013. <tr id="i29" class="rowColor">
  1014. <td class="colFirst"><code>static void</code></td>
  1015. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#convertPointsToHomogeneous-org.opencv.core.Mat-org.opencv.core.Mat-">convertPointsToHomogeneous</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  1016. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst)</code>
  1017. <div class="block">Converts points from Euclidean to homogeneous space.</div>
  1018. </td>
  1019. </tr>
  1020. <tr id="i30" class="altColor">
  1021. <td class="colFirst"><code>static void</code></td>
  1022. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#correctMatches-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">correctMatches</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  1023. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  1024. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  1025. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newPoints1,
  1026. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newPoints2)</code>
  1027. <div class="block">Refines coordinates of corresponding points.</div>
  1028. </td>
  1029. </tr>
  1030. <tr id="i31" class="rowColor">
  1031. <td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#decomposeEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">decomposeEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t)</code>
<div class="block">Decompose an essential matrix to possible rotations and translation.</div>
</td>
</tr>
<tr id="i32" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#decomposeHomographyMat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-java.util.List-">decomposeHomographyMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;H,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rotations,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;translations,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;normals)</code>
<div class="block">Decompose a homography matrix to rotation(s), translation(s) and plane normal(s).</div>
</td>
</tr>
<tr id="i33" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#decomposeProjectionMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">decomposeProjectionMatrix</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;transVect)</code>
<div class="block">Decomposes a projection matrix into a rotation matrix and a camera intrinsic matrix.</div>
</td>
</tr>
<tr id="i34" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#decomposeProjectionMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">decomposeProjectionMatrix</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;transVect,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixX)</code>
<div class="block">Decomposes a projection matrix into a rotation matrix and a camera intrinsic matrix.</div>
</td>
</tr>
<tr id="i35" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#decomposeProjectionMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">decomposeProjectionMatrix</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;transVect,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixX,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixY)</code>
<div class="block">Decomposes a projection matrix into a rotation matrix and a camera intrinsic matrix.</div>
</td>
</tr>
<tr id="i36" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#decomposeProjectionMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">decomposeProjectionMatrix</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;transVect,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixX,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixY,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixZ)</code>
<div class="block">Decomposes a projection matrix into a rotation matrix and a camera intrinsic matrix.</div>
</td>
</tr>
<tr id="i37" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#decomposeProjectionMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">decomposeProjectionMatrix</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;transVect,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixX,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixY,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixZ,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;eulerAngles)</code>
<div class="block">Decomposes a projection matrix into a rotation matrix and a camera intrinsic matrix.</div>
</td>
</tr>
<tr id="i38" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#drawChessboardCorners-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.MatOfPoint2f-boolean-">drawChessboardCorners</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;corners,
boolean&nbsp;patternWasFound)</code>
<div class="block">Renders the detected chessboard corners.</div>
</td>
</tr>
<tr id="i39" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#drawFrameAxes-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-float-">drawFrameAxes</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
float&nbsp;length)</code>
<div class="block">Draw axes of the world/object coordinate system from pose estimation.</div>
</td>
</tr>
<tr id="i40" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#drawFrameAxes-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-float-int-">drawFrameAxes</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
float&nbsp;length,
int&nbsp;thickness)</code>
<div class="block">Draw axes of the world/object coordinate system from pose estimation.</div>
</td>
</tr>
<tr id="i41" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-">estimateAffine2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to)</code>
<div class="block">Computes an optimal affine transformation between two 2D point sets.</div>
</td>
</tr>
<tr id="i42" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">estimateAffine2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers)</code>
<div class="block">Computes an optimal affine transformation between two 2D point sets.</div>
</td>
</tr>
<tr id="i43" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">estimateAffine2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
int&nbsp;method)</code>
<div class="block">Computes an optimal affine transformation between two 2D point sets.</div>
</td>
</tr>
<tr id="i44" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-">estimateAffine2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
int&nbsp;method,
double&nbsp;ransacReprojThreshold)</code>
<div class="block">Computes an optimal affine transformation between two 2D point sets.</div>
</td>
</tr>
<tr id="i45" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-long-">estimateAffine2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
int&nbsp;method,
double&nbsp;ransacReprojThreshold,
long&nbsp;maxIters)</code>
<div class="block">Computes an optimal affine transformation between two 2D point sets.</div>
</td>
</tr>
<tr id="i46" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-long-double-">estimateAffine2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
int&nbsp;method,
double&nbsp;ransacReprojThreshold,
long&nbsp;maxIters,
double&nbsp;confidence)</code>
<div class="block">Computes an optimal affine transformation between two 2D point sets.</div>
</td>
</tr>
<tr id="i47" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-long-double-long-">estimateAffine2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
int&nbsp;method,
double&nbsp;ransacReprojThreshold,
long&nbsp;maxIters,
double&nbsp;confidence,
long&nbsp;refineIters)</code>
<div class="block">Computes an optimal affine transformation between two 2D point sets.</div>
</td>
</tr>
<tr id="i48" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.calib3d.UsacParams-">estimateAffine2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;pts1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;pts2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
<a href="../../../org/opencv/calib3d/UsacParams.html" title="class in org.opencv.calib3d">UsacParams</a>&nbsp;params)</code>&nbsp;</td>
</tr>
<tr id="i49" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffine3D-org.opencv.core.Mat-org.opencv.core.Mat-">estimateAffine3D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst)</code>
<div class="block">Computes an optimal affine transformation between two 3D point sets.</div>
</td>
</tr>
<tr id="i50" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffine3D-org.opencv.core.Mat-org.opencv.core.Mat-double:A-">estimateAffine3D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
double[]&nbsp;scale)</code>
<div class="block">Computes an optimal affine transformation between two 3D point sets.</div>
</td>
</tr>
<tr id="i51" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffine3D-org.opencv.core.Mat-org.opencv.core.Mat-double:A-boolean-">estimateAffine3D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
double[]&nbsp;scale,
boolean&nbsp;force_rotation)</code>
<div class="block">Computes an optimal affine transformation between two 3D point sets.</div>
</td>
</tr>
<tr id="i52" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffine3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">estimateAffine3D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;out,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers)</code>
<div class="block">Computes an optimal affine transformation between two 3D point sets.</div>
</td>
</tr>
<tr id="i53" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffine3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-">estimateAffine3D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;out,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
double&nbsp;ransacThreshold)</code>
<div class="block">Computes an optimal affine transformation between two 3D point sets.</div>
</td>
</tr>
<tr id="i54" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffine3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-double-">estimateAffine3D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;out,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
double&nbsp;ransacThreshold,
double&nbsp;confidence)</code>
<div class="block">Computes an optimal affine transformation between two 3D point sets.</div>
</td>
</tr>
<tr id="i55" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffinePartial2D-org.opencv.core.Mat-org.opencv.core.Mat-">estimateAffinePartial2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to)</code>
<div class="block">Computes an optimal limited affine transformation with 4 degrees of freedom between
two 2D point sets.</div>
</td>
</tr>
<tr id="i56" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffinePartial2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">estimateAffinePartial2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers)</code>
<div class="block">Computes an optimal limited affine transformation with 4 degrees of freedom between
two 2D point sets.</div>
</td>
</tr>
<tr id="i57" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffinePartial2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">estimateAffinePartial2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
int&nbsp;method)</code>
<div class="block">Computes an optimal limited affine transformation with 4 degrees of freedom between
two 2D point sets.</div>
</td>
</tr>
<tr id="i58" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffinePartial2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-">estimateAffinePartial2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
int&nbsp;method,
double&nbsp;ransacReprojThreshold)</code>
<div class="block">Computes an optimal limited affine transformation with 4 degrees of freedom between
two 2D point sets.</div>
</td>
</tr>
<tr id="i59" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffinePartial2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-long-">estimateAffinePartial2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
int&nbsp;method,
double&nbsp;ransacReprojThreshold,
long&nbsp;maxIters)</code>
<div class="block">Computes an optimal limited affine transformation with 4 degrees of freedom between
two 2D point sets.</div>
</td>
</tr>
<tr id="i60" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffinePartial2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-long-double-">estimateAffinePartial2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
int&nbsp;method,
double&nbsp;ransacReprojThreshold,
long&nbsp;maxIters,
double&nbsp;confidence)</code>
<div class="block">Computes an optimal limited affine transformation with 4 degrees of freedom between
two 2D point sets.</div>
</td>
</tr>
<tr id="i61" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateAffinePartial2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-long-double-long-">estimateAffinePartial2D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
int&nbsp;method,
double&nbsp;ransacReprojThreshold,
long&nbsp;maxIters,
double&nbsp;confidence,
long&nbsp;refineIters)</code>
<div class="block">Computes an optimal limited affine transformation with 4 degrees of freedom between
two 2D point sets.</div>
</td>
</tr>
<tr id="i62" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Scalar.html" title="class in org.opencv.core">Scalar</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateChessboardSharpness-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-">estimateChessboardSharpness</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners)</code>
<div class="block">Estimates the sharpness of a detected chessboard.</div>
</td>
</tr>
<tr id="i63" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Scalar.html" title="class in org.opencv.core">Scalar</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateChessboardSharpness-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-float-">estimateChessboardSharpness</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners,
float&nbsp;rise_distance)</code>
<div class="block">Estimates the sharpness of a detected chessboard.</div>
</td>
</tr>
<tr id="i64" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Scalar.html" title="class in org.opencv.core">Scalar</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateChessboardSharpness-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-float-boolean-">estimateChessboardSharpness</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners,
float&nbsp;rise_distance,
boolean&nbsp;vertical)</code>
<div class="block">Estimates the sharpness of a detected chessboard.</div>
</td>
</tr>
<tr id="i65" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Scalar.html" title="class in org.opencv.core">Scalar</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateChessboardSharpness-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-float-boolean-org.opencv.core.Mat-">estimateChessboardSharpness</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners,
float&nbsp;rise_distance,
boolean&nbsp;vertical,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;sharpness)</code>
<div class="block">Estimates the sharpness of a detected chessboard.</div>
</td>
</tr>
<tr id="i66" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateTranslation3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">estimateTranslation3D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;out,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers)</code>
<div class="block">Computes an optimal translation between two 3D point sets.</div>
</td>
</tr>
<tr id="i67" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateTranslation3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-">estimateTranslation3D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;out,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
double&nbsp;ransacThreshold)</code>
<div class="block">Computes an optimal translation between two 3D point sets.</div>
</td>
</tr>
<tr id="i68" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#estimateTranslation3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-double-">estimateTranslation3D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;out,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
double&nbsp;ransacThreshold,
double&nbsp;confidence)</code>
<div class="block">Computes an optimal translation between two 3D point sets.</div>
</td>
</tr>
<tr id="i69" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#filterHomographyDecompByVisibleRefpoints-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">filterHomographyDecompByVisibleRefpoints</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rotations,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;normals,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;beforePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;afterPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;possibleSolutions)</code>
<div class="block">Filters homography decompositions based on additional information.</div>
</td>
</tr>
<tr id="i70" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#filterHomographyDecompByVisibleRefpoints-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">filterHomographyDecompByVisibleRefpoints</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rotations,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;normals,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;beforePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;afterPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;possibleSolutions,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;pointsMask)</code>
<div class="block">Filters homography decompositions based on additional information.</div>
</td>
</tr>
<tr id="i71" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#filterSpeckles-org.opencv.core.Mat-double-int-double-">filterSpeckles</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;img,
double&nbsp;newVal,
int&nbsp;maxSpeckleSize,
double&nbsp;maxDiff)</code>
<div class="block">Filters off small noise blobs (speckles) in the disparity map</div>
</td>
</tr>
<tr id="i72" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#filterSpeckles-org.opencv.core.Mat-double-int-double-org.opencv.core.Mat-">filterSpeckles</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;img,
double&nbsp;newVal,
int&nbsp;maxSpeckleSize,
double&nbsp;maxDiff,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;buf)</code>
<div class="block">Filters off small noise blobs (speckles) in the disparity map</div>
</td>
</tr>
<tr id="i73" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#find4QuadCornerSubpix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-">find4QuadCornerSubpix</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;img,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;region_size)</code>&nbsp;</td>
</tr>
<tr id="i74" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findChessboardCorners-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.MatOfPoint2f-">findChessboardCorners</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;corners)</code>
<div class="block">Finds the positions of internal corners of the chessboard.</div>
</td>
</tr>
<tr id="i75" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findChessboardCorners-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.MatOfPoint2f-int-">findChessboardCorners</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;corners,
int&nbsp;flags)</code>
<div class="block">Finds the positions of internal corners of the chessboard.</div>
</td>
</tr>
<tr id="i76" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findChessboardCornersSB-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-">findChessboardCornersSB</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners)</code>&nbsp;</td>
</tr>
<tr id="i77" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findChessboardCornersSB-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-int-">findChessboardCornersSB</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners,
int&nbsp;flags)</code>&nbsp;</td>
</tr>
<tr id="i78" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findChessboardCornersSBWithMeta-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-int-org.opencv.core.Mat-">findChessboardCornersSBWithMeta</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners,
int&nbsp;flags,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;meta)</code>
<div class="block">Finds the positions of internal corners of the chessboard using a sector based approach.</div>
</td>
</tr>
<tr id="i79" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findCirclesGrid-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-">findCirclesGrid</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;centers)</code>&nbsp;</td>
</tr>
<tr id="i80" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findCirclesGrid-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-int-">findCirclesGrid</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;centers,
int&nbsp;flags)</code>&nbsp;</td>
</tr>
<tr id="i81" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2)</code>&nbsp;</td>
</tr>
<tr id="i82" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-double-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
double&nbsp;focal)</code>&nbsp;</td>
</tr>
<tr id="i83" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
double&nbsp;focal,
<a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp)</code>&nbsp;</td>
</tr>
<tr id="i84" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-int-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
double&nbsp;focal,
<a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp,
int&nbsp;method)</code>&nbsp;</td>
</tr>
<tr id="i85" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-int-double-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
double&nbsp;focal,
<a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp,
int&nbsp;method,
double&nbsp;prob)</code>&nbsp;</td>
</tr>
<tr id="i86" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-int-double-double-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
double&nbsp;focal,
<a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp,
int&nbsp;method,
double&nbsp;prob,
double&nbsp;threshold)</code>&nbsp;</td>
</tr>
<tr id="i87" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-int-double-double-int-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
double&nbsp;focal,
<a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp,
int&nbsp;method,
double&nbsp;prob,
double&nbsp;threshold,
int&nbsp;maxIters)</code>&nbsp;</td>
</tr>
<tr id="i88" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-int-double-double-int-org.opencv.core.Mat-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
double&nbsp;focal,
<a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp,
int&nbsp;method,
double&nbsp;prob,
double&nbsp;threshold,
int&nbsp;maxIters,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</code>&nbsp;</td>
</tr>
<tr id="i89" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix)</code>
<div class="block">Calculates an essential matrix from the corresponding points in two images.</div>
</td>
</tr>
<tr id="i90" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
int&nbsp;method)</code>
<div class="block">Calculates an essential matrix from the corresponding points in two images.</div>
</td>
</tr>
<tr id="i91" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
int&nbsp;method,
double&nbsp;prob)</code>
<div class="block">Calculates an essential matrix from the corresponding points in two images.</div>
</td>
</tr>
<tr id="i92" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-double-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
int&nbsp;method,
double&nbsp;prob,
double&nbsp;threshold)</code>
<div class="block">Calculates an essential matrix from the corresponding points in two images.</div>
</td>
</tr>
<tr id="i93" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-double-int-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
int&nbsp;method,
double&nbsp;prob,
double&nbsp;threshold,
int&nbsp;maxIters)</code>
<div class="block">Calculates an essential matrix from the corresponding points in two images.</div>
</td>
</tr>
<tr id="i94" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-double-int-org.opencv.core.Mat-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
int&nbsp;method,
double&nbsp;prob,
double&nbsp;threshold,
int&nbsp;maxIters,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</code>
<div class="block">Calculates an essential matrix from the corresponding points in two images.</div>
</td>
</tr>
<tr id="i95" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2)</code>
<div class="block">Calculates an essential matrix from the corresponding points in two images from potentially two different cameras.</div>
</td>
</tr>
<tr id="i96" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
int&nbsp;method)</code>
<div class="block">Calculates an essential matrix from the corresponding points in two images from potentially two different cameras.</div>
</td>
</tr>
<tr id="i97" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
int&nbsp;method,
double&nbsp;prob)</code>
<div class="block">Calculates an essential matrix from the corresponding points in two images from potentially two different cameras.</div>
</td>
</tr>
<tr id="i98" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-double-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
int&nbsp;method,
double&nbsp;prob,
double&nbsp;threshold)</code>
<div class="block">Calculates an essential matrix from the corresponding points in two images from potentially two different cameras.</div>
</td>
</tr>
<tr id="i99" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-double-org.opencv.core.Mat-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  1695. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  1696. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  1697. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  1698. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  1699. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  1700. int&nbsp;method,
  1701. double&nbsp;prob,
  1702. double&nbsp;threshold,
  1703. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</code>
  1704. <div class="block">Calculates an essential matrix from the corresponding points in two images from potentially two different cameras.</div>
  1705. </td>
  1706. </tr>
  1707. <tr id="i100" class="altColor">
  1708. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1709. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.calib3d.UsacParams-">findEssentialMat</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  1710. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  1711. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  1712. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  1713. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dist_coeff1,
  1714. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dist_coeff2,
  1715. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask,
  1716. <a href="../../../org/opencv/calib3d/UsacParams.html" title="class in org.opencv.calib3d">UsacParams</a>&nbsp;params)</code>&nbsp;</td>
  1717. </tr>
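<tr>
<td class="colLast" colspan="2">
<div class="block">A minimal usage sketch for the <code>findEssentialMat</code> overloads above, assuming the OpenCV native library is loaded and <code>org.opencv.core.*</code> / <code>org.opencv.calib3d.Calib3d</code> are imported; the hard-coded matches and camera matrix are illustrative placeholders, not real data.</div>
<pre><code>// Estimate the essential matrix from matched pixel coordinates with RANSAC.
MatOfPoint2f pts1 = new MatOfPoint2f(new Point(100, 120), new Point(210, 95),
        new Point(330, 240), new Point(80, 300), new Point(400, 150));
MatOfPoint2f pts2 = new MatOfPoint2f(new Point(102, 118), new Point(215, 97),
        new Point(328, 245), new Point(85, 298), new Point(395, 155));
Mat K = Mat.eye(3, 3, CvType.CV_64F);              // placeholder intrinsics
K.put(0, 0, 800); K.put(1, 1, 800); K.put(0, 2, 320); K.put(1, 2, 240);
Mat inlierMask = new Mat();                        // receives a 0/1 flag per match
Mat E = Calib3d.findEssentialMat(pts1, pts2, K,
        Calib3d.RANSAC, 0.999, 1.0, 1000, inlierMask);
</code></pre>
</td>
</tr>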
  1718. <tr id="i101" class="rowColor">
  1719. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1720. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-">findFundamentalMat</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  1721. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2)</code>&nbsp;</td>
  1722. </tr>
  1723. <tr id="i102" class="altColor">
  1724. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1725. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-">findFundamentalMat</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  1726. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2,
  1727. int&nbsp;method)</code>&nbsp;</td>
  1728. </tr>
  1729. <tr id="i103" class="rowColor">
  1730. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1731. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-">findFundamentalMat</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  1732. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2,
  1733. int&nbsp;method,
  1734. double&nbsp;ransacReprojThreshold)</code>&nbsp;</td>
  1735. </tr>
  1736. <tr id="i104" class="altColor">
  1737. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1738. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-double-">findFundamentalMat</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  1739. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2,
  1740. int&nbsp;method,
  1741. double&nbsp;ransacReprojThreshold,
  1742. double&nbsp;confidence)</code>&nbsp;</td>
  1743. </tr>
  1744. <tr id="i105" class="rowColor">
  1745. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1746. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-double-int-">findFundamentalMat</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  1747. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2,
  1748. int&nbsp;method,
  1749. double&nbsp;ransacReprojThreshold,
  1750. double&nbsp;confidence,
  1751. int&nbsp;maxIters)</code>
  1752. <div class="block">Calculates a fundamental matrix from the corresponding points in two images.</div>
  1753. </td>
  1754. </tr>
  1755. <tr id="i106" class="altColor">
  1756. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1757. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-double-int-org.opencv.core.Mat-">findFundamentalMat</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  1758. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2,
  1759. int&nbsp;method,
  1760. double&nbsp;ransacReprojThreshold,
  1761. double&nbsp;confidence,
  1762. int&nbsp;maxIters,
  1763. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</code>
  1764. <div class="block">Calculates a fundamental matrix from the corresponding points in two images.</div>
  1765. </td>
  1766. </tr>
  1767. <tr id="i107" class="rowColor">
  1768. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1769. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-double-org.opencv.core.Mat-">findFundamentalMat</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  1770. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2,
  1771. int&nbsp;method,
  1772. double&nbsp;ransacReprojThreshold,
  1773. double&nbsp;confidence,
  1774. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</code>&nbsp;</td>
  1775. </tr>
  1776. <tr id="i108" class="altColor">
  1777. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1778. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.calib3d.UsacParams-">findFundamentalMat</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  1779. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2,
  1780. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask,
  1781. <a href="../../../org/opencv/calib3d/UsacParams.html" title="class in org.opencv.calib3d">UsacParams</a>&nbsp;params)</code>&nbsp;</td>
  1782. </tr>
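<tr>
<td class="colLast" colspan="2">
<div class="block">A minimal usage sketch for the <code>findFundamentalMat</code> overloads above; <code>pts1</code>/<code>pts2</code> are illustrative stand-ins for at least eight correspondences obtained from feature matching, and the same imports as in the previous sketch are assumed.</div>
<pre><code>// Robustly fit the fundamental matrix with RANSAC (3 px threshold, 99% confidence).
MatOfPoint2f pts1 = new MatOfPoint2f(
        new Point(10, 20), new Point(60, 35), new Point(120, 80), new Point(200, 40),
        new Point(45, 150), new Point(90, 210), new Point(160, 170), new Point(230, 120));
MatOfPoint2f pts2 = new MatOfPoint2f(
        new Point(12, 21), new Point(63, 34), new Point(118, 83), new Point(205, 42),
        new Point(48, 148), new Point(88, 214), new Point(162, 168), new Point(228, 123));
Mat F = Calib3d.findFundamentalMat(pts1, pts2, Calib3d.FM_RANSAC, 3.0, 0.99);
if (!F.empty()) {
    System.out.println("F = " + F.dump());         // an empty result means the fit failed
}
</code></pre>
</td>
</tr>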
  1783. <tr id="i109" class="rowColor">
  1784. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1785. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findHomography-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-">findHomography</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;srcPoints,
  1786. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dstPoints)</code>
  1787. <div class="block">Finds a perspective transformation between two planes.</div>
  1788. </td>
  1789. </tr>
  1790. <tr id="i110" class="altColor">
  1791. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1792. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findHomography-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-">findHomography</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;srcPoints,
  1793. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dstPoints,
  1794. int&nbsp;method)</code>
  1795. <div class="block">Finds a perspective transformation between two planes.</div>
  1796. </td>
  1797. </tr>
  1798. <tr id="i111" class="rowColor">
  1799. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1800. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findHomography-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-">findHomography</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;srcPoints,
  1801. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dstPoints,
  1802. int&nbsp;method,
  1803. double&nbsp;ransacReprojThreshold)</code>
  1804. <div class="block">Finds a perspective transformation between two planes.</div>
  1805. </td>
  1806. </tr>
  1807. <tr id="i112" class="altColor">
  1808. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1809. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findHomography-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-org.opencv.core.Mat-">findHomography</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;srcPoints,
  1810. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dstPoints,
  1811. int&nbsp;method,
  1812. double&nbsp;ransacReprojThreshold,
  1813. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</code>
  1814. <div class="block">Finds a perspective transformation between two planes.</div>
  1815. </td>
  1816. </tr>
  1817. <tr id="i113" class="rowColor">
  1818. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1819. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findHomography-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-org.opencv.core.Mat-int-">findHomography</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;srcPoints,
  1820. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dstPoints,
  1821. int&nbsp;method,
  1822. double&nbsp;ransacReprojThreshold,
  1823. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask,
  1824. int&nbsp;maxIters)</code>
  1825. <div class="block">Finds a perspective transformation between two planes.</div>
  1826. </td>
  1827. </tr>
  1828. <tr id="i114" class="altColor">
  1829. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1830. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findHomography-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-org.opencv.core.Mat-int-double-">findHomography</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;srcPoints,
  1831. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dstPoints,
  1832. int&nbsp;method,
  1833. double&nbsp;ransacReprojThreshold,
  1834. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask,
  1835. int&nbsp;maxIters,
  1836. double&nbsp;confidence)</code>
  1837. <div class="block">Finds a perspective transformation between two planes.</div>
  1838. </td>
  1839. </tr>
  1840. <tr id="i115" class="rowColor">
  1841. <td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
  1842. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#findHomography-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.calib3d.UsacParams-">findHomography</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;srcPoints,
  1843. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dstPoints,
  1844. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask,
  1845. <a href="../../../org/opencv/calib3d/UsacParams.html" title="class in org.opencv.calib3d">UsacParams</a>&nbsp;params)</code>&nbsp;</td>
  1846. </tr>
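<tr>
<td class="colLast" colspan="2">
<div class="block">A minimal usage sketch for the <code>findHomography</code> overloads above; the four point pairs are illustrative, and real code would pass many matches and inspect the optional mask to recover the RANSAC inliers.</div>
<pre><code>// Fit a perspective transform between two planes with RANSAC.
MatOfPoint2f srcPts = new MatOfPoint2f(
        new Point(0, 0), new Point(640, 0), new Point(640, 480), new Point(0, 480));
MatOfPoint2f dstPts = new MatOfPoint2f(
        new Point(20, 30), new Point(600, 10), new Point(630, 460), new Point(10, 470));
Mat inlierMask = new Mat();                        // 0/1 flag per correspondence
Mat H = Calib3d.findHomography(srcPts, dstPts, Calib3d.RANSAC, 3.0, inlierMask);
// H can then be handed to Imgproc.warpPerspective to warp one image onto the other.
</code></pre>
</td>
</tr>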
  1847. <tr id="i116" class="altColor">
  1848. <td class="colFirst"><code>static double</code></td>
  1849. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_calibrate-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-">fisheye_calibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  1850. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  1851. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;image_size,
  1852. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  1853. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  1854. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  1855. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs)</code>
1856. <div class="block">Performs camera calibration.</div>
  1857. </td>
  1858. </tr>
  1859. <tr id="i117" class="rowColor">
  1860. <td class="colFirst"><code>static double</code></td>
  1861. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_calibrate-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-int-">fisheye_calibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  1862. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  1863. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;image_size,
  1864. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  1865. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  1866. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  1867. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  1868. int&nbsp;flags)</code>
1869. <div class="block">Performs camera calibration.</div>
  1870. </td>
  1871. </tr>
  1872. <tr id="i118" class="altColor">
  1873. <td class="colFirst"><code>static double</code></td>
  1874. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_calibrate-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-int-org.opencv.core.TermCriteria-">fisheye_calibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  1875. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  1876. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;image_size,
  1877. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  1878. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  1879. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  1880. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  1881. int&nbsp;flags,
  1882. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</code>
1883. <div class="block">Performs camera calibration.</div>
  1884. </td>
  1885. </tr>
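<tr>
<td class="colLast" colspan="2">
<div class="block">A sketch of the wiring for <code>fisheye_calibrate</code>; the per-view corner lists are assumed to be gathered elsewhere (for example with <code>findChessboardCorners</code>), and the helper name <code>calibrateFisheye</code> together with the <code>java.util</code> imports are illustrative assumptions.</div>
<pre><code>// Run fisheye calibration over previously collected board views.
static double calibrateFisheye(List&lt;Mat&gt; objectPoints,   // one MatOfPoint3f per view
                               List&lt;Mat&gt; imagePoints,    // one MatOfPoint2f per view
                               Size imageSize) {
    Mat K = new Mat();                          // receives the 3x3 intrinsic matrix
    Mat D = new Mat();                          // receives the fisheye distortion coefficients
    List&lt;Mat&gt; rvecs = new ArrayList&lt;&gt;();
    List&lt;Mat&gt; tvecs = new ArrayList&lt;&gt;();
    double rms = Calib3d.fisheye_calibrate(objectPoints, imagePoints, imageSize,
            K, D, rvecs, tvecs);
    System.out.println("RMS reprojection error: " + rms);
    return rms;
}
</code></pre>
</td>
</tr>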
  1886. <tr id="i119" class="rowColor">
  1887. <td class="colFirst"><code>static void</code></td>
  1888. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_distortPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">fisheye_distortPoints</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  1889. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  1890. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  1891. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D)</code>
1892. <div class="block">Distorts 2D points using the fisheye model.</div>
  1893. </td>
  1894. </tr>
  1895. <tr id="i120" class="altColor">
  1896. <td class="colFirst"><code>static void</code></td>
  1897. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_distortPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-">fisheye_distortPoints</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  1898. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  1899. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  1900. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  1901. double&nbsp;alpha)</code>
1902. <div class="block">Distorts 2D points using the fisheye model.</div>
  1903. </td>
  1904. </tr>
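<tr>
<td class="colLast" colspan="2">
<div class="block">A minimal sketch for <code>fisheye_distortPoints</code>; the identity-like <code>K</code> and zero <code>D</code> below are placeholders so the call is self-contained, where real code would use values from <code>fisheye_calibrate</code>.</div>
<pre><code>// Map ideal (normalized) point coordinates back through the fisheye model.
MatOfPoint2f undistorted = new MatOfPoint2f(new Point(0.10, -0.05), new Point(-0.20, 0.30));
Mat distorted = new Mat();                      // receives image (pixel) coordinates
Mat K = Mat.eye(3, 3, CvType.CV_64F);
K.put(0, 0, 600); K.put(1, 1, 600); K.put(0, 2, 320); K.put(1, 2, 240);
Mat D = Mat.zeros(4, 1, CvType.CV_64F);         // k1..k4 fisheye coefficients (all zero here)
Calib3d.fisheye_distortPoints(undistorted, distorted, K, D);
</code></pre>
</td>
</tr>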
  1905. <tr id="i121" class="rowColor">
  1906. <td class="colFirst"><code>static void</code></td>
  1907. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_estimateNewCameraMatrixForUndistortRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-">fisheye_estimateNewCameraMatrixForUndistortRectify</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  1908. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  1909. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;image_size,
  1910. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  1911. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P)</code>
1912. <div class="block">Estimates a new camera intrinsic matrix for undistortion or rectification.</div>
  1913. </td>
  1914. </tr>
  1915. <tr id="i122" class="altColor">
  1916. <td class="colFirst"><code>static void</code></td>
  1917. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_estimateNewCameraMatrixForUndistortRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-double-">fisheye_estimateNewCameraMatrixForUndistortRectify</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  1918. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  1919. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;image_size,
  1920. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  1921. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P,
  1922. double&nbsp;balance)</code>
1923. <div class="block">Estimates a new camera intrinsic matrix for undistortion or rectification.</div>
  1924. </td>
  1925. </tr>
  1926. <tr id="i123" class="rowColor">
  1927. <td class="colFirst"><code>static void</code></td>
  1928. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_estimateNewCameraMatrixForUndistortRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Size-">fisheye_estimateNewCameraMatrixForUndistortRectify</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  1929. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  1930. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;image_size,
  1931. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  1932. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P,
  1933. double&nbsp;balance,
  1934. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;new_size)</code>
1935. <div class="block">Estimates a new camera intrinsic matrix for undistortion or rectification.</div>
  1936. </td>
  1937. </tr>
  1938. <tr id="i124" class="altColor">
  1939. <td class="colFirst"><code>static void</code></td>
  1940. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_estimateNewCameraMatrixForUndistortRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Size-double-">fisheye_estimateNewCameraMatrixForUndistortRectify</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  1941. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  1942. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;image_size,
  1943. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  1944. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P,
  1945. double&nbsp;balance,
  1946. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;new_size,
  1947. double&nbsp;fov_scale)</code>
1948. <div class="block">Estimates a new camera intrinsic matrix for undistortion or rectification.</div>
  1949. </td>
  1950. </tr>
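<tr>
<td class="colLast" colspan="2">
<div class="block">A sketch for <code>fisheye_estimateNewCameraMatrixForUndistortRectify</code>, written as a small helper; <code>K</code> and <code>D</code> are assumed to come from <code>fisheye_calibrate</code>, and the helper name is illustrative.</div>
<pre><code>// Derive a new camera matrix that trades cropped black borders against field of view.
static Mat estimateNewCameraMatrix(Mat K, Mat D, Size imageSize) {
    Mat R = Mat.eye(3, 3, CvType.CV_64F);       // identity: plain undistortion, no rectification
    Mat P = new Mat();                          // receives the new camera/projection matrix
    Calib3d.fisheye_estimateNewCameraMatrixForUndistortRectify(
            K, D, imageSize, R, P, 0.0);        // balance in [0, 1] trades cropping vs. retained view
    return P;
}
</code></pre>
</td>
</tr>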
  1951. <tr id="i125" class="rowColor">
  1952. <td class="colFirst"><code>static void</code></td>
  1953. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_initUndistortRectifyMap-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-">fisheye_initUndistortRectifyMap</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  1954. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  1955. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  1956. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P,
  1957. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;size,
  1958. int&nbsp;m1type,
  1959. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;map1,
  1960. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;map2)</code>
1961. <div class="block">Computes undistortion and rectification maps for image transformation by remap.</div>
  1962. </td>
  1963. </tr>
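<tr>
<td class="colLast" colspan="2">
<div class="block">A sketch combining <code>fisheye_initUndistortRectifyMap</code> with <code>Imgproc.remap</code>; <code>K</code>, <code>D</code> and <code>P</code> are assumed to come from the calibration calls above, <code>org.opencv.imgproc.Imgproc</code> is assumed to be imported, and in real code the two maps would be computed once and reused for every frame.</div>
<pre><code>// Build the undistortion/rectification lookup maps, then remap a frame through them.
static Mat undistortFisheyeFrame(Mat frame, Mat K, Mat D, Mat P, Size size) {
    Mat map1 = new Mat();
    Mat map2 = new Mat();
    Calib3d.fisheye_initUndistortRectifyMap(
            K, D, Mat.eye(3, 3, CvType.CV_64F), P, size, CvType.CV_16SC2, map1, map2);
    Mat out = new Mat();
    Imgproc.remap(frame, out, map1, map2, Imgproc.INTER_LINEAR);
    return out;
}
</code></pre>
</td>
</tr>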
  1964. <tr id="i126" class="altColor">
  1965. <td class="colFirst"><code>static void</code></td>
  1966. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_projectPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">fisheye_projectPoints</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  1967. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  1968. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  1969. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  1970. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  1971. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D)</code>&nbsp;</td>
  1972. </tr>
  1973. <tr id="i127" class="rowColor">
  1974. <td class="colFirst"><code>static void</code></td>
  1975. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_projectPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-">fisheye_projectPoints</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  1976. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  1977. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  1978. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  1979. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  1980. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  1981. double&nbsp;alpha)</code>&nbsp;</td>
  1982. </tr>
  1983. <tr id="i128" class="altColor">
  1984. <td class="colFirst"><code>static void</code></td>
  1985. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_projectPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Mat-">fisheye_projectPoints</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  1986. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  1987. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  1988. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  1989. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  1990. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  1991. double&nbsp;alpha,
  1992. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;jacobian)</code>&nbsp;</td>
  1993. </tr>
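<tr>
<td class="colLast" colspan="2">
<div class="block">A minimal sketch for <code>fisheye_projectPoints</code>; the 3D points are expressed directly in the camera frame (so <code>rvec</code> and <code>tvec</code> are zero), and <code>K</code>/<code>D</code> are placeholder intrinsics rather than real calibration output.</div>
<pre><code>// Project a few 3D points through the fisheye model.
Mat K = Mat.eye(3, 3, CvType.CV_64F);
K.put(0, 0, 600); K.put(1, 1, 600); K.put(0, 2, 320); K.put(1, 2, 240);
Mat D = Mat.zeros(4, 1, CvType.CV_64F);
MatOfPoint3f objectPts = new MatOfPoint3f(
        new Point3(0, 0, 1), new Point3(0.1, 0, 1), new Point3(0, 0.1, 1));
Mat rvec = Mat.zeros(3, 1, CvType.CV_64F);      // no extra rotation
Mat tvec = Mat.zeros(3, 1, CvType.CV_64F);      // no extra translation
Mat imagePts = new Mat();                       // receives the projected 2D points
Calib3d.fisheye_projectPoints(objectPts, imagePts, rvec, tvec, K, D);
</code></pre>
</td>
</tr>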
  1994. <tr id="i129" class="rowColor">
  1995. <td class="colFirst"><code>static double</code></td>
  1996. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-">fisheye_stereoCalibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  1997. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  1998. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  1999. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  2000. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  2001. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  2002. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  2003. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  2004. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  2005. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T)</code>&nbsp;</td>
  2006. </tr>
  2007. <tr id="i130" class="altColor">
  2008. <td class="colFirst"><code>static double</code></td>
  2009. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-int-">fisheye_stereoCalibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  2010. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  2011. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  2012. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  2013. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  2014. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  2015. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  2016. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  2017. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  2018. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  2019. int&nbsp;flags)</code>&nbsp;</td>
  2020. </tr>
  2021. <tr id="i131" class="rowColor">
  2022. <td class="colFirst"><code>static double</code></td>
  2023. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.TermCriteria-">fisheye_stereoCalibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  2024. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  2025. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  2026. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  2027. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  2028. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  2029. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  2030. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  2031. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  2032. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  2033. int&nbsp;flags,
  2034. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</code>&nbsp;</td>
  2035. </tr>
  2036. <tr id="i132" class="altColor">
  2037. <td class="colFirst"><code>static double</code></td>
  2038. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-">fisheye_stereoCalibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  2039. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  2040. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  2041. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  2042. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  2043. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  2044. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  2045. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  2046. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  2047. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  2048. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  2049. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs)</code>
2050. <div class="block">Performs stereo calibration.</div>
  2051. </td>
  2052. </tr>
  2053. <tr id="i133" class="rowColor">
  2054. <td class="colFirst"><code>static double</code></td>
  2055. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-int-">fisheye_stereoCalibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  2056. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  2057. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  2058. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  2059. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  2060. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  2061. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  2062. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  2063. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  2064. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  2065. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  2066. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  2067. int&nbsp;flags)</code>
2068. <div class="block">Performs stereo calibration.</div>
  2069. </td>
  2070. </tr>
  2071. <tr id="i134" class="altColor">
  2072. <td class="colFirst"><code>static double</code></td>
  2073. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-int-org.opencv.core.TermCriteria-">fisheye_stereoCalibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  2074. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  2075. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  2076. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  2077. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  2078. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  2079. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  2080. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  2081. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  2082. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  2083. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  2084. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  2085. int&nbsp;flags,
  2086. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</code>
2087. <div class="block">Performs stereo calibration.</div>
  2088. </td>
  2089. </tr>
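<tr>
<td class="colLast" colspan="2">
<div class="block">A sketch of the wiring for <code>fisheye_stereoCalibrate</code>; the three point lists are assumed to hold per-view board corners observed by both cameras, and the helper name is illustrative.</div>
<pre><code>// Estimate the relative pose (R, T) between two fisheye cameras.
static double stereoCalibrateFisheye(List&lt;Mat&gt; objectPoints,
                                     List&lt;Mat&gt; imagePoints1,
                                     List&lt;Mat&gt; imagePoints2,
                                     Size imageSize) {
    Mat K1 = new Mat(), D1 = new Mat();         // first-camera intrinsics (filled in when not fixed via flags)
    Mat K2 = new Mat(), D2 = new Mat();         // second-camera intrinsics (filled in when not fixed via flags)
    Mat R = new Mat(), T = new Mat();           // receive the rotation/translation between the cameras
    double rms = Calib3d.fisheye_stereoCalibrate(objectPoints, imagePoints1, imagePoints2,
            K1, D1, K2, D2, imageSize, R, T, 0);    // flags = 0; see the calibration flag constants
    return rms;
}
</code></pre>
</td>
</tr>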
  2090. <tr id="i135" class="rowColor">
  2091. <td class="colFirst"><code>static void</code></td>
  2092. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">fisheye_stereoRectify</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  2093. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  2094. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  2095. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  2096. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  2097. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  2098. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  2099. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  2100. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  2101. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  2102. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  2103. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  2104. int&nbsp;flags)</code>
2105. <div class="block">Stereo rectification for the fisheye camera model.</div>
  2106. </td>
  2107. </tr>
  2108. <tr id="i136" class="altColor">
  2109. <td class="colFirst"><code>static void</code></td>
  2110. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.Size-">fisheye_stereoRectify</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  2111. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  2112. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  2113. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  2114. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  2115. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  2116. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  2117. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  2118. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  2119. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  2120. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  2121. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  2122. int&nbsp;flags,
  2123. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImageSize)</code>
2124. <div class="block">Stereo rectification for the fisheye camera model.</div>
  2125. </td>
  2126. </tr>
  2127. <tr id="i137" class="rowColor">
  2128. <td class="colFirst"><code>static void</code></td>
  2129. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.Size-double-">fisheye_stereoRectify</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  2130. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  2131. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  2132. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  2133. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  2134. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  2135. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  2136. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  2137. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  2138. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  2139. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  2140. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  2141. int&nbsp;flags,
  2142. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImageSize,
  2143. double&nbsp;balance)</code>
2144. <div class="block">Stereo rectification for the fisheye camera model.</div>
  2145. </td>
  2146. </tr>
  2147. <tr id="i138" class="altColor">
  2148. <td class="colFirst"><code>static void</code></td>
  2149. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.Size-double-double-">fisheye_stereoRectify</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  2150. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  2151. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  2152. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  2153. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  2154. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  2155. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  2156. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  2157. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  2158. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  2159. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  2160. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  2161. int&nbsp;flags,
  2162. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImageSize,
  2163. double&nbsp;balance,
  2164. double&nbsp;fov_scale)</code>
2165. <div class="block">Stereo rectification for the fisheye camera model.</div>
  2166. </td>
  2167. </tr>
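<tr>
<td class="colLast" colspan="2">
<div class="block">A sketch for <code>fisheye_stereoRectify</code> that turns a stereo calibration result into rectification transforms; all inputs are assumed to come from <code>fisheye_stereoCalibrate</code>, and the helper name and array return type are illustrative.</div>
<pre><code>// Compute rectification rotations, projection matrices and the disparity-to-depth matrix Q.
static Mat[] rectifyFisheyeStereo(Mat K1, Mat D1, Mat K2, Mat D2,
                                  Size imageSize, Mat R, Mat T) {
    Mat R1 = new Mat(), R2 = new Mat();         // per-camera rectification rotations
    Mat P1 = new Mat(), P2 = new Mat();         // per-camera projection matrices
    Mat Q = new Mat();                          // 4x4 disparity-to-depth mapping
    Calib3d.fisheye_stereoRectify(K1, D1, K2, D2, imageSize, R, T,
            R1, R2, P1, P2, Q, Calib3d.CALIB_ZERO_DISPARITY);
    return new Mat[] { R1, R2, P1, P2, Q };
}
</code></pre>
</td>
</tr>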
  2168. <tr id="i139" class="rowColor">
  2169. <td class="colFirst"><code>static void</code></td>
  2170. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_undistortImage-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">fisheye_undistortImage</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  2171. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  2172. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  2173. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D)</code>
  2174. <div class="block">Transforms an image to compensate for fisheye lens distortion.</div>
  2175. </td>
  2176. </tr>
  2177. <tr id="i140" class="altColor">
  2178. <td class="colFirst"><code>static void</code></td>
  2179. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_undistortImage-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">fisheye_undistortImage</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  2180. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  2181. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  2182. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  2183. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Knew)</code>
  2184. <div class="block">Transforms an image to compensate for fisheye lens distortion.</div>
  2185. </td>
  2186. </tr>
  2187. <tr id="i141" class="rowColor">
  2188. <td class="colFirst"><code>static void</code></td>
  2189. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_undistortImage-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-">fisheye_undistortImage</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  2190. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  2191. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  2192. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  2193. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Knew,
  2194. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;new_size)</code>
  2195. <div class="block">Transforms an image to compensate for fisheye lens distortion.</div>
  2196. </td>
  2197. </tr>
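<tr>
<td class="colLast" colspan="2">
<div class="block">A minimal sketch for <code>fisheye_undistortImage</code>; passing <code>K</code> again as <code>Knew</code> keeps the original scale, whereas a matrix from <code>fisheye_estimateNewCameraMatrixForUndistortRectify</code> would control how much of the fisheye field of view is retained. The helper name is illustrative.</div>
<pre><code>// One-shot undistortion of a single fisheye image.
static Mat undistortFisheye(Mat distorted, Mat K, Mat D) {
    Mat undistorted = new Mat();
    Calib3d.fisheye_undistortImage(distorted, undistorted, K, D, K);  // Knew = K here
    return undistorted;
}
</code></pre>
</td>
</tr>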
  2198. <tr id="i142" class="altColor">
  2199. <td class="colFirst"><code>static void</code></td>
  2200. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_undistortPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">fisheye_undistortPoints</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  2201. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  2202. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  2203. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D)</code>
<div class="block">Undistorts 2D points using fisheye model.</div>
</td>
</tr>
<tr id="i143" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_undistortPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">fisheye_undistortPoints</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R)</code>
<div class="block">Undistorts 2D points using fisheye model.</div>
</td>
</tr>
<tr id="i144" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_undistortPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">fisheye_undistortPoints</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P)</code>
<div class="block">Undistorts 2D points using fisheye model.</div>
</td>
</tr>
<tr id="i145" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#fisheye_undistortPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.TermCriteria-">fisheye_undistortPoints</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P,
<a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</code>
<div class="block">Undistorts 2D points using fisheye model.</div>
</td>
</tr>
<tr id="i146" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#getDefaultNewCameraMatrix-org.opencv.core.Mat-">getDefaultNewCameraMatrix</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix)</code>
<div class="block">Returns the default new camera matrix.</div>
</td>
</tr>
<tr id="i147" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#getDefaultNewCameraMatrix-org.opencv.core.Mat-org.opencv.core.Size-">getDefaultNewCameraMatrix</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imgsize)</code>
<div class="block">Returns the default new camera matrix.</div>
</td>
</tr>
<tr id="i148" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#getDefaultNewCameraMatrix-org.opencv.core.Mat-org.opencv.core.Size-boolean-">getDefaultNewCameraMatrix</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imgsize,
boolean&nbsp;centerPrincipalPoint)</code>
<div class="block">Returns the default new camera matrix.</div>
</td>
</tr>
<tr id="i149" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#getOptimalNewCameraMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-double-">getOptimalNewCameraMatrix</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
double&nbsp;alpha)</code>
<div class="block">Returns the new camera intrinsic matrix based on the free scaling parameter.</div>
</td>
</tr>
<tr id="i150" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#getOptimalNewCameraMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-double-org.opencv.core.Size-">getOptimalNewCameraMatrix</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
double&nbsp;alpha,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImgSize)</code>
<div class="block">Returns the new camera intrinsic matrix based on the free scaling parameter.</div>
</td>
</tr>
<tr id="i151" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#getOptimalNewCameraMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-double-org.opencv.core.Size-org.opencv.core.Rect-">getOptimalNewCameraMatrix</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
double&nbsp;alpha,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImgSize,
<a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;validPixROI)</code>
<div class="block">Returns the new camera intrinsic matrix based on the free scaling parameter.</div>
</td>
</tr>
<tr id="i152" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#getOptimalNewCameraMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-double-org.opencv.core.Size-org.opencv.core.Rect-boolean-">getOptimalNewCameraMatrix</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
double&nbsp;alpha,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImgSize,
<a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;validPixROI,
boolean&nbsp;centerPrincipalPoint)</code>
<div class="block">Returns the new camera intrinsic matrix based on the free scaling parameter.</div>
</td>
</tr>
<tr id="i153" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#getValidDisparityROI-org.opencv.core.Rect-org.opencv.core.Rect-int-int-int-">getValidDisparityROI</a></span>(<a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;roi1,
<a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;roi2,
int&nbsp;minDisparity,
int&nbsp;numberOfDisparities,
int&nbsp;blockSize)</code>&nbsp;</td>
</tr>
<tr id="i154" class="altColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#initCameraMatrix2D-java.util.List-java.util.List-org.opencv.core.Size-">initCameraMatrix2D</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&gt;&nbsp;objectPoints,
java.util.List&lt;<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&gt;&nbsp;imagePoints,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize)</code>
<div class="block">Finds an initial camera intrinsic matrix from 3D-2D point correspondences.</div>
</td>
</tr>
<tr id="i155" class="rowColor">
<td class="colFirst"><code>static <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#initCameraMatrix2D-java.util.List-java.util.List-org.opencv.core.Size-double-">initCameraMatrix2D</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&gt;&nbsp;objectPoints,
java.util.List&lt;<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&gt;&nbsp;imagePoints,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
double&nbsp;aspectRatio)</code>
<div class="block">Finds an initial camera intrinsic matrix from 3D-2D point correspondences.</div>
</td>
</tr>
<tr id="i156" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#initInverseRectificationMap-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-">initInverseRectificationMap</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newCameraMatrix,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;size,
int&nbsp;m1type,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;map1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;map2)</code>
<div class="block">Computes the projection and inverse-rectification transformation map.</div>
</td>
</tr>
<tr id="i157" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#initUndistortRectifyMap-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-">initUndistortRectifyMap</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newCameraMatrix,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;size,
int&nbsp;m1type,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;map1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;map2)</code>
<div class="block">Computes the undistortion and rectification transformation map.</div>
</td>
</tr>
<tr id="i158" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#matMulDeriv-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">matMulDeriv</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;A,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;B,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dABdA,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dABdB)</code>
<div class="block">Computes partial derivatives of the matrix product for each multiplied matrix.</div>
</td>
</tr>
<tr id="i159" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#projectPoints-org.opencv.core.MatOfPoint3f-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.MatOfPoint2f-">projectPoints</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints)</code>
<div class="block">Projects 3D points to an image plane.</div>
</td>
</tr>
<tr id="i160" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#projectPoints-org.opencv.core.MatOfPoint3f-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-">projectPoints</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;jacobian)</code>
<div class="block">Projects 3D points to an image plane.</div>
</td>
</tr>
<tr id="i161" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#projectPoints-org.opencv.core.MatOfPoint3f-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-double-">projectPoints</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;jacobian,
double&nbsp;aspectRatio)</code>
<div class="block">Projects 3D points to an image plane.</div>
</td>
</tr>
<tr id="i162" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">recoverPose</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t)</code>&nbsp;</td>
</tr>
<tr id="i163" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-">recoverPose</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
double&nbsp;focal)</code>&nbsp;</td>
</tr>
<tr id="i164" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-">recoverPose</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
double&nbsp;focal,
<a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp)</code>&nbsp;</td>
</tr>
<tr id="i165" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-org.opencv.core.Mat-">recoverPose</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
double&nbsp;focal,
<a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</code>&nbsp;</td>
</tr>
<tr id="i166" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">recoverPose</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t)</code>
<div class="block">Recovers the relative camera rotation and the translation from an estimated essential
matrix and the corresponding points in two images, using cheirality check.</div>
</td>
</tr>
<tr id="i167" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-">recoverPose</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
double&nbsp;distanceThresh)</code>&nbsp;</td>
</tr>
<tr id="i168" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Mat-">recoverPose</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
double&nbsp;distanceThresh,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</code>&nbsp;</td>
</tr>
<tr id="i169" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Mat-org.opencv.core.Mat-">recoverPose</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
double&nbsp;distanceThresh,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;triangulatedPoints)</code>&nbsp;</td>
</tr>
<tr id="i170" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">recoverPose</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</code>
<div class="block">Recovers the relative camera rotation and the translation from an estimated essential
matrix and the corresponding points in two images, using cheirality check.</div>
</td>
</tr>
<tr id="i171" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">recoverPose</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t)</code>
<div class="block">Recovers the relative camera rotation and the translation from corresponding points in two images from two different cameras, using cheirality check.</div>
</td>
</tr>
<tr id="i172" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">recoverPose</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
int&nbsp;method)</code>
<div class="block">Recovers the relative camera rotation and the translation from corresponding points in two images from two different cameras, using cheirality check.</div>
</td>
</tr>
<tr id="i173" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-">recoverPose</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
int&nbsp;method,
double&nbsp;prob)</code>
<div class="block">Recovers the relative camera rotation and the translation from corresponding points in two images from two different cameras, using cheirality check.</div>
</td>
</tr>
<tr id="i174" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-double-">recoverPose</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
int&nbsp;method,
double&nbsp;prob,
double&nbsp;threshold)</code>
<div class="block">Recovers the relative camera rotation and the translation from corresponding points in two images from two different cameras, using cheirality check.</div>
</td>
</tr>
<tr id="i175" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-double-org.opencv.core.Mat-">recoverPose</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
int&nbsp;method,
double&nbsp;prob,
double&nbsp;threshold,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</code>
<div class="block">Recovers the relative camera rotation and the translation from corresponding points in two images from two different cameras, using cheirality check.</div>
</td>
</tr>
<tr id="i176" class="altColor">
<td class="colFirst"><code>static float</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#rectify3Collinear-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Size-org.opencv.core.Rect-org.opencv.core.Rect-int-">rectify3Collinear</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix3,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs3,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imgpt1,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imgpt3,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R12,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T12,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R13,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T13,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R3,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P3,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
double&nbsp;alpha,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImgSize,
<a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;roi1,
<a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;roi2,
int&nbsp;flags)</code>&nbsp;</td>
</tr>
<tr id="i177" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#reprojectImageTo3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">reprojectImageTo3D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;disparity,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;_3dImage,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q)</code>
<div class="block">Reprojects a disparity image to 3D space.</div>
</td>
</tr>
<tr id="i178" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#reprojectImageTo3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-boolean-">reprojectImageTo3D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;disparity,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;_3dImage,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
boolean&nbsp;handleMissingValues)</code>
<div class="block">Reprojects a disparity image to 3D space.</div>
</td>
</tr>
<tr id="i179" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#reprojectImageTo3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-boolean-int-">reprojectImageTo3D</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;disparity,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;_3dImage,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
boolean&nbsp;handleMissingValues,
int&nbsp;ddepth)</code>
<div class="block">Reprojects a disparity image to 3D space.</div>
</td>
</tr>
<tr id="i180" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#Rodrigues-org.opencv.core.Mat-org.opencv.core.Mat-">Rodrigues</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst)</code>
<div class="block">Converts a rotation matrix to a rotation vector or vice versa.</div>
</td>
</tr>
<tr id="i181" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#Rodrigues-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">Rodrigues</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;jacobian)</code>
<div class="block">Converts a rotation matrix to a rotation vector or vice versa.</div>
</td>
</tr>
<tr id="i182" class="altColor">
<td class="colFirst"><code>static double[]</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#RQDecomp3x3-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">RQDecomp3x3</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxR,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxQ)</code>
<div class="block">Computes an RQ decomposition of 3x3 matrices.</div>
</td>
</tr>
<tr id="i183" class="rowColor">
<td class="colFirst"><code>static double[]</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#RQDecomp3x3-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">RQDecomp3x3</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxR,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxQ,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Qx)</code>
<div class="block">Computes an RQ decomposition of 3x3 matrices.</div>
</td>
</tr>
<tr id="i184" class="altColor">
<td class="colFirst"><code>static double[]</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#RQDecomp3x3-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">RQDecomp3x3</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxR,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxQ,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Qx,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Qy)</code>
<div class="block">Computes an RQ decomposition of 3x3 matrices.</div>
</td>
</tr>
<tr id="i185" class="rowColor">
<td class="colFirst"><code>static double[]</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#RQDecomp3x3-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">RQDecomp3x3</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxR,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxQ,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Qx,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Qy,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Qz)</code>
<div class="block">Computes an RQ decomposition of 3x3 matrices.</div>
</td>
</tr>
<tr id="i186" class="altColor">
<td class="colFirst"><code>static double</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#sampsonDistance-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">sampsonDistance</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;pt1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;pt2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F)</code>
<div class="block">Calculates the Sampson Distance between two points.</div>
</td>
</tr>
<tr id="i187" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solveP3P-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-int-">solveP3P</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
int&nbsp;flags)</code>
<div class="block">Finds an object pose from 3 3D-2D point correspondences.</div>
</td>
</tr>
<tr id="i188" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnP-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-">solvePnP</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec)</code>
<div class="block">Finds an object pose from 3D-2D point correspondences.</div>
</td>
</tr>
<tr id="i189" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnP-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-">solvePnP</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
boolean&nbsp;useExtrinsicGuess)</code>
<div class="block">Finds an object pose from 3D-2D point correspondences.</div>
</td>
</tr>
<tr id="i190" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnP-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-int-">solvePnP</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
boolean&nbsp;useExtrinsicGuess,
int&nbsp;flags)</code>
<div class="block">Finds an object pose from 3D-2D point correspondences.</div>
</td>
</tr>
<tr id="i191" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPGeneric-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-">solvePnPGeneric</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs)</code>
<div class="block">Finds an object pose from 3D-2D point correspondences.</div>
</td>
</tr>
<tr id="i192" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPGeneric-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-boolean-">solvePnPGeneric</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
boolean&nbsp;useExtrinsicGuess)</code>
<div class="block">Finds an object pose from 3D-2D point correspondences.</div>
</td>
</tr>
<tr id="i193" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPGeneric-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-boolean-int-">solvePnPGeneric</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
boolean&nbsp;useExtrinsicGuess,
int&nbsp;flags)</code>
<div class="block">Finds an object pose from 3D-2D point correspondences.</div>
</td>
</tr>
<tr id="i194" class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPGeneric-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-boolean-int-org.opencv.core.Mat-">solvePnPGeneric</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
boolean&nbsp;useExtrinsicGuess,
int&nbsp;flags,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec)</code>
<div class="block">Finds an object pose from 3D-2D point correspondences.</div>
</td>
</tr>
<tr id="i195" class="rowColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPGeneric-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-boolean-int-org.opencv.core.Mat-org.opencv.core.Mat-">solvePnPGeneric</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  2791. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPGeneric-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-boolean-int-org.opencv.core.Mat-org.opencv.core.Mat-">solvePnPGeneric</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  2792. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  2793. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  2794. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  2795. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  2796. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  2797. boolean&nbsp;useExtrinsicGuess,
  2798. int&nbsp;flags,
  2799. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  2800. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec)</code>
  2801. <div class="block">Finds an object pose from 3D-2D point correspondences.</div>
  2802. </td>
  2803. </tr>
  2804. <tr id="i196" class="altColor">
  2805. <td class="colFirst"><code>static int</code></td>
  2806. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPGeneric-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-boolean-int-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">solvePnPGeneric</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  2807. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  2808. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  2809. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  2810. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  2811. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  2812. boolean&nbsp;useExtrinsicGuess,
  2813. int&nbsp;flags,
  2814. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  2815. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  2816. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;reprojectionError)</code>
  2817. <div class="block">Finds an object pose from 3D-2D point correspondences.</div>
  2818. </td>
  2819. </tr>
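<!-- Editor's sketch (not part of the generated Javadoc): solvePnPGeneric returns every candidate pose
     (the int return value is the number of solutions) and writes them into the rvecs/tvecs lists.
     Inputs are assumed to be prepared as in the solvePnP sketch above; names are illustrative.

     import java.util.ArrayList;
     import java.util.List;
     import org.opencv.calib3d.Calib3d;
     import org.opencv.core.Mat;

     public class PnPGenericExample {
         // Returns the number of solutions found; some solvers report more than one pose.
         static int allPoses(Mat objectPoints, Mat imagePoints, Mat cameraMatrix, Mat distCoeffs,
                             List<Mat> rvecsOut, List<Mat> tvecsOut) {
             List<Mat> rvecs = new ArrayList<>();
             List<Mat> tvecs = new ArrayList<>();
             int n = Calib3d.solvePnPGeneric(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvecs, tvecs);
             rvecsOut.addAll(rvecs);
             tvecsOut.addAll(tvecs);
             return n;
         }
     }
-->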
<tr id="i197" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-">solvePnPRansac</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec)</code>
<div class="block">Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.</div>
</td>
</tr>
<tr id="i198" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-">solvePnPRansac</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
boolean&nbsp;useExtrinsicGuess)</code>
<div class="block">Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.</div>
</td>
</tr>
<tr id="i199" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-int-">solvePnPRansac</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
boolean&nbsp;useExtrinsicGuess,
int&nbsp;iterationsCount)</code>
<div class="block">Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.</div>
</td>
</tr>
<tr id="i200" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-int-float-">solvePnPRansac</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
boolean&nbsp;useExtrinsicGuess,
int&nbsp;iterationsCount,
float&nbsp;reprojectionError)</code>
<div class="block">Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.</div>
</td>
</tr>
<tr id="i201" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-int-float-double-">solvePnPRansac</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
boolean&nbsp;useExtrinsicGuess,
int&nbsp;iterationsCount,
float&nbsp;reprojectionError,
double&nbsp;confidence)</code>
<div class="block">Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.</div>
</td>
</tr>
<tr id="i202" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-int-float-double-org.opencv.core.Mat-">solvePnPRansac</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
boolean&nbsp;useExtrinsicGuess,
int&nbsp;iterationsCount,
float&nbsp;reprojectionError,
double&nbsp;confidence,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers)</code>
<div class="block">Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.</div>
</td>
</tr>
<tr id="i203" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-int-float-double-org.opencv.core.Mat-int-">solvePnPRansac</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
boolean&nbsp;useExtrinsicGuess,
int&nbsp;iterationsCount,
float&nbsp;reprojectionError,
double&nbsp;confidence,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
int&nbsp;flags)</code>
<div class="block">Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.</div>
</td>
</tr>
<tr id="i204" class="altColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">solvePnPRansac</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers)</code>&nbsp;</td>
</tr>
<tr id="i205" class="rowColor">
<td class="colFirst"><code>static boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.calib3d.UsacParams-">solvePnPRansac</a></span>(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
<a href="../../../org/opencv/calib3d/UsacParams.html" title="class in org.opencv.calib3d">UsacParams</a>&nbsp;params)</code>&nbsp;</td>
</tr>
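<!-- Editor's sketch (not part of the generated Javadoc): the RANSAC variant tolerates outlier
     correspondences and reports the inlier indices. Inputs are assumed to be prepared as in the
     solvePnP sketch above; the iteration count, reprojection threshold and confidence are illustrative.

     import org.opencv.calib3d.Calib3d;
     import org.opencv.core.*;

     public class PnPRansacExample {
         // objectPoints/imagePoints may contain mismatched (outlier) pairs.
         static boolean robustPose(MatOfPoint3f objectPoints, MatOfPoint2f imagePoints,
                                   Mat cameraMatrix, MatOfDouble distCoeffs,
                                   Mat rvec, Mat tvec, Mat inliers) {
             // 100 RANSAC iterations, 8 px reprojection threshold, 99% confidence (illustrative values).
             // 'inliers' is filled with the indices of the correspondences consistent with the best model.
             return Calib3d.solvePnPRansac(objectPoints, imagePoints, cameraMatrix, distCoeffs,
                     rvec, tvec, false, 100, 8.0f, 0.99, inliers);
         }
     }
-->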
<tr id="i206" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPRefineLM-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">solvePnPRefineLM</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec)</code>
<div class="block">Refines a pose (the translation and the rotation that transform a 3D point expressed in the object coordinate frame
to the camera coordinate frame) from 3D-2D point correspondences, starting from an initial solution.</div>
</td>
</tr>
<tr id="i207" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPRefineLM-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.TermCriteria-">solvePnPRefineLM</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
<a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</code>
<div class="block">Refines a pose (the translation and the rotation that transform a 3D point expressed in the object coordinate frame
to the camera coordinate frame) from 3D-2D point correspondences, starting from an initial solution.</div>
</td>
</tr>
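<!-- Editor's sketch (not part of the generated Javadoc): solvePnPRefineLM polishes an existing pose in place
     with a Levenberg-Marquardt minimization of the reprojection error. The sketch assumes rvec/tvec already
     hold an initial pose, e.g. from one of the solvePnP or solvePnPRansac calls above.

     import org.opencv.calib3d.Calib3d;
     import org.opencv.core.Mat;

     public class PnPRefineLMExample {
         // rvec and tvec are both inputs (initial guess) and outputs (refined pose).
         static void refine(Mat objectPoints, Mat imagePoints, Mat cameraMatrix, Mat distCoeffs,
                            Mat rvec, Mat tvec) {
             Calib3d.solvePnPRefineLM(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);
         }
     }
-->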
<tr id="i208" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPRefineVVS-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">solvePnPRefineVVS</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec)</code>
<div class="block">Refines a pose (the translation and the rotation that transform a 3D point expressed in the object coordinate frame
to the camera coordinate frame) from 3D-2D point correspondences, starting from an initial solution.</div>
</td>
</tr>
<tr id="i209" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPRefineVVS-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.TermCriteria-">solvePnPRefineVVS</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
<a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</code>
<div class="block">Refines a pose (the translation and the rotation that transform a 3D point expressed in the object coordinate frame
to the camera coordinate frame) from 3D-2D point correspondences, starting from an initial solution.</div>
</td>
</tr>
<tr id="i210" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#solvePnPRefineVVS-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.TermCriteria-double-">solvePnPRefineVVS</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
<a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria,
double&nbsp;VVSlambda)</code>
<div class="block">Refines a pose (the translation and the rotation that transform a 3D point expressed in the object coordinate frame
to the camera coordinate frame) from 3D-2D point correspondences, starting from an initial solution.</div>
</td>
</tr>
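<!-- Editor's sketch (not part of the generated Javadoc): solvePnPRefineVVS has the same in-place contract as
     solvePnPRefineLM but uses a virtual visual servoing scheme. The TermCriteria values below are illustrative.

     import org.opencv.calib3d.Calib3d;
     import org.opencv.core.Mat;
     import org.opencv.core.TermCriteria;

     public class PnPRefineVVSExample {
         static void refine(Mat objectPoints, Mat imagePoints, Mat cameraMatrix, Mat distCoeffs,
                            Mat rvec, Mat tvec) {
             // Stop after 20 iterations or when the update falls below 1e-6 (illustrative criteria).
             TermCriteria criteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 20, 1e-6);
             Calib3d.solvePnPRefineVVS(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec, criteria);
         }
     }
-->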
<tr id="i211" class="rowColor">
<td class="colFirst"><code>static double</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">stereoCalibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F)</code>&nbsp;</td>
</tr>
<tr id="i212" class="altColor">
<td class="colFirst"><code>static double</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">stereoCalibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
int&nbsp;flags)</code>&nbsp;</td>
</tr>
<tr id="i213" class="rowColor">
<td class="colFirst"><code>static double</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.TermCriteria-">stereoCalibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
int&nbsp;flags,
<a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</code>&nbsp;</td>
</tr>
<tr id="i214" class="altColor">
<td class="colFirst"><code>static double</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">stereoCalibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors)</code>&nbsp;</td>
</tr>
<tr id="i215" class="rowColor">
<td class="colFirst"><code>static double</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">stereoCalibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
int&nbsp;flags)</code>&nbsp;</td>
</tr>
<tr id="i216" class="altColor">
<td class="colFirst"><code>static double</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.TermCriteria-">stereoCalibrate</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
int&nbsp;flags,
<a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</code>&nbsp;</td>
</tr>
<tr id="i217" class="rowColor">
<td class="colFirst"><code>static double</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoCalibrateExtended-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-">stereoCalibrateExtended</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors)</code>
<div class="block">Calibrates a stereo camera setup.</div>
</td>
</tr>
<tr id="i218" class="altColor">
<td class="colFirst"><code>static double</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoCalibrateExtended-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-int-">stereoCalibrateExtended</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
int&nbsp;flags)</code>
<div class="block">Calibrates a stereo camera setup.</div>
</td>
</tr>
<tr id="i219" class="rowColor">
<td class="colFirst"><code>static double</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoCalibrateExtended-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-int-org.opencv.core.TermCriteria-">stereoCalibrateExtended</a></span>(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
int&nbsp;flags,
<a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</code>
<div class="block">Calibrates a stereo camera setup.</div>
</td>
</tr>
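<!-- Editor's sketch (not part of the generated Javadoc): stereoCalibrate estimates the rotation R and
     translation T between the two cameras (plus the essential and fundamental matrices) from per-view
     correspondences. The sketch assumes both cameras were already calibrated individually and that the
     three point lists hold one Mat per calibration view; the flag choice is illustrative.

     import java.util.List;
     import org.opencv.calib3d.Calib3d;
     import org.opencv.core.Mat;
     import org.opencv.core.Size;

     public class StereoCalibrateExample {
         // Returns the RMS reprojection error; R, T, E, F are filled as outputs.
         static double calibratePair(List<Mat> objectPoints, List<Mat> imagePoints1, List<Mat> imagePoints2,
                                     Mat cameraMatrix1, Mat distCoeffs1, Mat cameraMatrix2, Mat distCoeffs2,
                                     Size imageSize, Mat R, Mat T, Mat E, Mat F) {
             // Keep the per-camera intrinsics fixed and only estimate the extrinsic relation.
             return Calib3d.stereoCalibrate(objectPoints, imagePoints1, imagePoints2,
                     cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2, imageSize,
                     R, T, E, F, Calib3d.CALIB_FIX_INTRINSIC);
         }
     }
-->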
<tr id="i220" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">stereoRectify</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q)</code>
<div class="block">Computes rectification transforms for each head of a calibrated stereo camera.</div>
</td>
</tr>
<tr id="i221" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">stereoRectify</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
int&nbsp;flags)</code>
<div class="block">Computes rectification transforms for each head of a calibrated stereo camera.</div>
</td>
</tr>
<tr id="i222" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-">stereoRectify</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
int&nbsp;flags,
double&nbsp;alpha)</code>
<div class="block">Computes rectification transforms for each head of a calibrated stereo camera.</div>
</td>
</tr>
<tr id="i223" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-org.opencv.core.Size-">stereoRectify</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
int&nbsp;flags,
double&nbsp;alpha,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImageSize)</code>
<div class="block">Computes rectification transforms for each head of a calibrated stereo camera.</div>
</td>
</tr>
<tr id="i224" class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-org.opencv.core.Size-org.opencv.core.Rect-">stereoRectify</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
int&nbsp;flags,
double&nbsp;alpha,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImageSize,
<a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;validPixROI1)</code>
<div class="block">Computes rectification transforms for each head of a calibrated stereo camera.</div>
</td>
</tr>
<tr id="i225" class="rowColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-org.opencv.core.Size-org.opencv.core.Rect-org.opencv.core.Rect-">stereoRectify</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
int&nbsp;flags,
double&nbsp;alpha,
<a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImageSize,
<a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;validPixROI1,
<a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;validPixROI2)</code>
<div class="block">Computes rectification transforms for each head of a calibrated stereo camera.</div>
</td>
</tr>
  3282. <tr id="i226" class="altColor">
  3283. <td class="colFirst"><code>static boolean</code></td>
  3284. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoRectifyUncalibrated-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-">stereoRectifyUncalibrated</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  3285. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  3286. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  3287. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imgSize,
  3288. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;H1,
  3289. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;H2)</code>
  3290. <div class="block">Computes a rectification transform for an uncalibrated stereo camera.</div>
  3291. </td>
  3292. </tr>
  3293. <tr id="i227" class="rowColor">
  3294. <td class="colFirst"><code>static boolean</code></td>
  3295. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#stereoRectifyUncalibrated-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-double-">stereoRectifyUncalibrated</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  3296. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  3297. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  3298. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imgSize,
  3299. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;H1,
  3300. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;H2,
  3301. double&nbsp;threshold)</code>
  3302. <div class="block">Computes a rectification transform for an uncalibrated stereo camera.</div>
  3303. </td>
  3304. </tr>
  3305. <tr id="i228" class="altColor">
  3306. <td class="colFirst"><code>static void</code></td>
  3307. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#triangulatePoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">triangulatePoints</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projMatr1,
  3308. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projMatr2,
  3309. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projPoints1,
  3310. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projPoints2,
  3311. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points4D)</code>
  3312. <div class="block">This function reconstructs 3-dimensional points (in homogeneous coordinates) by using
  3313. their observations with a stereo camera.</div>
  3314. </td>
  3315. </tr>
  3316. <tr id="i229" class="rowColor">
  3317. <td class="colFirst"><code>static void</code></td>
  3318. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#undistort-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">undistort</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  3319. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  3320. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  3321. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs)</code>
  3322. <div class="block">Transforms an image to compensate for lens distortion.</div>
  3323. </td>
  3324. </tr>
  3325. <tr id="i230" class="altColor">
  3326. <td class="colFirst"><code>static void</code></td>
  3327. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#undistort-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">undistort</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  3328. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  3329. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  3330. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  3331. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newCameraMatrix)</code>
  3332. <div class="block">Transforms an image to compensate for lens distortion.</div>
  3333. </td>
  3334. </tr>
  3335. <tr id="i231" class="rowColor">
  3336. <td class="colFirst"><code>static void</code></td>
  3337. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#undistortImagePoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">undistortImagePoints</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  3338. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  3339. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  3340. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs)</code>
3341. <div class="block">Computes the undistorted positions of image points.</div>
  3342. </td>
  3343. </tr>
  3344. <tr id="i232" class="altColor">
  3345. <td class="colFirst"><code>static void</code></td>
  3346. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#undistortImagePoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.TermCriteria-">undistortImagePoints</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  3347. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  3348. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  3349. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  3350. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;arg1)</code>
3351. <div class="block">Computes the undistorted positions of image points.</div>
  3352. </td>
  3353. </tr>
  3354. <tr id="i233" class="rowColor">
  3355. <td class="colFirst"><code>static void</code></td>
  3356. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#undistortPoints-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.Mat-">undistortPoints</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;src,
  3357. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dst,
  3358. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  3359. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs)</code>
  3360. <div class="block">Computes the ideal point coordinates from the observed point coordinates.</div>
  3361. </td>
  3362. </tr>
  3363. <tr id="i234" class="altColor">
  3364. <td class="colFirst"><code>static void</code></td>
  3365. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#undistortPoints-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">undistortPoints</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;src,
  3366. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dst,
  3367. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  3368. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  3369. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R)</code>
  3370. <div class="block">Computes the ideal point coordinates from the observed point coordinates.</div>
  3371. </td>
  3372. </tr>
  3373. <tr id="i235" class="rowColor">
  3374. <td class="colFirst"><code>static void</code></td>
  3375. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#undistortPoints-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">undistortPoints</a></span>(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;src,
  3376. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dst,
  3377. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  3378. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  3379. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  3380. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P)</code>
  3381. <div class="block">Computes the ideal point coordinates from the observed point coordinates.</div>
  3382. </td>
  3383. </tr>
  3384. <tr id="i236" class="altColor">
  3385. <td class="colFirst"><code>static void</code></td>
  3386. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#undistortPointsIter-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.TermCriteria-">undistortPointsIter</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  3387. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  3388. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  3389. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  3390. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  3391. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P,
  3392. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</code>
3393. <div class="block"><b>Note:</b> The default version of #undistortPoints performs 5 iterations to compute the undistorted points.</div>
  3394. </td>
  3395. </tr>
  3396. <tr id="i237" class="rowColor">
  3397. <td class="colFirst"><code>static void</code></td>
  3398. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#validateDisparity-org.opencv.core.Mat-org.opencv.core.Mat-int-int-">validateDisparity</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;disparity,
  3399. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cost,
  3400. int&nbsp;minDisparity,
  3401. int&nbsp;numberOfDisparities)</code>&nbsp;</td>
  3402. </tr>
  3403. <tr id="i238" class="altColor">
  3404. <td class="colFirst"><code>static void</code></td>
  3405. <td class="colLast"><code><span class="memberNameLink"><a href="../../../org/opencv/calib3d/Calib3d.html#validateDisparity-org.opencv.core.Mat-org.opencv.core.Mat-int-int-int-">validateDisparity</a></span>(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;disparity,
  3406. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cost,
  3407. int&nbsp;minDisparity,
  3408. int&nbsp;numberOfDisparities,
  3409. int&nbsp;disp12MaxDisp)</code>&nbsp;</td>
  3410. </tr>
  3411. </table>
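<div class="block">A minimal usage sketch tying together the stereo-rectification and undistortion entries summarized above. It assumes <code>cameraMatrix1/2</code>, <code>distCoeffs1/2</code>, <code>R</code>, <code>T</code>, <code>imageSize</code>, <code>rawImage</code> and <code>observedPoints</code> were obtained beforehand (for example from a stereoCalibrate run and feature detection), and that the usual <code>org.opencv.core</code> and <code>org.opencv.calib3d</code> imports are in place.</div>
<pre>
// Rectification transforms for both heads (inputs assumed from a prior stereoCalibrate call).
Mat R1 = new Mat(), R2 = new Mat(), P1 = new Mat(), P2 = new Mat(), Q = new Mat();
Rect roi1 = new Rect(), roi2 = new Rect();
Calib3d.stereoRectify(cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2,
        imageSize, R, T, R1, R2, P1, P2, Q,
        Calib3d.CALIB_ZERO_DISPARITY, 0.0 /* alpha = 0: keep only valid pixels */,
        imageSize, roi1, roi2);

// Remove lens distortion from a raw image of the first camera.
Mat undistorted = new Mat();
Calib3d.undistort(rawImage, undistorted, cameraMatrix1, distCoeffs1);

// Map observed (distorted) pixel coordinates to ideal coordinates in the rectified frame.
MatOfPoint2f ideal = new MatOfPoint2f();
Calib3d.undistortPoints(observedPoints, ideal, cameraMatrix1, distCoeffs1, R1, P1);
</pre>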
  3412. <ul class="blockList">
  3413. <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
  3414. <!-- -->
  3415. </a>
  3416. <h3>Methods inherited from class&nbsp;java.lang.Object</h3>
  3417. <code>equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li>
  3418. </ul>
  3419. </li>
  3420. </ul>
  3421. </li>
  3422. </ul>
  3423. </div>
  3424. <div class="details">
  3425. <ul class="blockList">
  3426. <li class="blockList">
  3427. <!-- ============ FIELD DETAIL =========== -->
  3428. <ul class="blockList">
  3429. <li class="blockList"><a name="field.detail">
  3430. <!-- -->
  3431. </a>
  3432. <h3>Field Detail</h3>
  3433. <a name="CALIB_CB_ACCURACY">
  3434. <!-- -->
  3435. </a>
  3436. <ul class="blockList">
  3437. <li class="blockList">
  3438. <h4>CALIB_CB_ACCURACY</h4>
  3439. <pre>public static final&nbsp;int CALIB_CB_ACCURACY</pre>
  3440. <dl>
  3441. <dt><span class="seeLabel">See Also:</span></dt>
  3442. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_CB_ACCURACY">Constant Field Values</a></dd>
  3443. </dl>
  3444. </li>
  3445. </ul>
  3446. <a name="CALIB_CB_ADAPTIVE_THRESH">
  3447. <!-- -->
  3448. </a>
  3449. <ul class="blockList">
  3450. <li class="blockList">
  3451. <h4>CALIB_CB_ADAPTIVE_THRESH</h4>
  3452. <pre>public static final&nbsp;int CALIB_CB_ADAPTIVE_THRESH</pre>
  3453. <dl>
  3454. <dt><span class="seeLabel">See Also:</span></dt>
  3455. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_CB_ADAPTIVE_THRESH">Constant Field Values</a></dd>
  3456. </dl>
  3457. </li>
  3458. </ul>
  3459. <a name="CALIB_CB_ASYMMETRIC_GRID">
  3460. <!-- -->
  3461. </a>
  3462. <ul class="blockList">
  3463. <li class="blockList">
  3464. <h4>CALIB_CB_ASYMMETRIC_GRID</h4>
  3465. <pre>public static final&nbsp;int CALIB_CB_ASYMMETRIC_GRID</pre>
  3466. <dl>
  3467. <dt><span class="seeLabel">See Also:</span></dt>
  3468. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_CB_ASYMMETRIC_GRID">Constant Field Values</a></dd>
  3469. </dl>
  3470. </li>
  3471. </ul>
  3472. <a name="CALIB_CB_CLUSTERING">
  3473. <!-- -->
  3474. </a>
  3475. <ul class="blockList">
  3476. <li class="blockList">
  3477. <h4>CALIB_CB_CLUSTERING</h4>
  3478. <pre>public static final&nbsp;int CALIB_CB_CLUSTERING</pre>
  3479. <dl>
  3480. <dt><span class="seeLabel">See Also:</span></dt>
  3481. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_CB_CLUSTERING">Constant Field Values</a></dd>
  3482. </dl>
  3483. </li>
  3484. </ul>
  3485. <a name="CALIB_CB_EXHAUSTIVE">
  3486. <!-- -->
  3487. </a>
  3488. <ul class="blockList">
  3489. <li class="blockList">
  3490. <h4>CALIB_CB_EXHAUSTIVE</h4>
  3491. <pre>public static final&nbsp;int CALIB_CB_EXHAUSTIVE</pre>
  3492. <dl>
  3493. <dt><span class="seeLabel">See Also:</span></dt>
  3494. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_CB_EXHAUSTIVE">Constant Field Values</a></dd>
  3495. </dl>
  3496. </li>
  3497. </ul>
  3498. <a name="CALIB_CB_FAST_CHECK">
  3499. <!-- -->
  3500. </a>
  3501. <ul class="blockList">
  3502. <li class="blockList">
  3503. <h4>CALIB_CB_FAST_CHECK</h4>
  3504. <pre>public static final&nbsp;int CALIB_CB_FAST_CHECK</pre>
  3505. <dl>
  3506. <dt><span class="seeLabel">See Also:</span></dt>
  3507. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_CB_FAST_CHECK">Constant Field Values</a></dd>
  3508. </dl>
  3509. </li>
  3510. </ul>
  3511. <a name="CALIB_CB_FILTER_QUADS">
  3512. <!-- -->
  3513. </a>
  3514. <ul class="blockList">
  3515. <li class="blockList">
  3516. <h4>CALIB_CB_FILTER_QUADS</h4>
  3517. <pre>public static final&nbsp;int CALIB_CB_FILTER_QUADS</pre>
  3518. <dl>
  3519. <dt><span class="seeLabel">See Also:</span></dt>
  3520. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_CB_FILTER_QUADS">Constant Field Values</a></dd>
  3521. </dl>
  3522. </li>
  3523. </ul>
  3524. <a name="CALIB_CB_LARGER">
  3525. <!-- -->
  3526. </a>
  3527. <ul class="blockList">
  3528. <li class="blockList">
  3529. <h4>CALIB_CB_LARGER</h4>
  3530. <pre>public static final&nbsp;int CALIB_CB_LARGER</pre>
  3531. <dl>
  3532. <dt><span class="seeLabel">See Also:</span></dt>
  3533. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_CB_LARGER">Constant Field Values</a></dd>
  3534. </dl>
  3535. </li>
  3536. </ul>
  3537. <a name="CALIB_CB_MARKER">
  3538. <!-- -->
  3539. </a>
  3540. <ul class="blockList">
  3541. <li class="blockList">
  3542. <h4>CALIB_CB_MARKER</h4>
  3543. <pre>public static final&nbsp;int CALIB_CB_MARKER</pre>
  3544. <dl>
  3545. <dt><span class="seeLabel">See Also:</span></dt>
  3546. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_CB_MARKER">Constant Field Values</a></dd>
  3547. </dl>
  3548. </li>
  3549. </ul>
  3550. <a name="CALIB_CB_NORMALIZE_IMAGE">
  3551. <!-- -->
  3552. </a>
  3553. <ul class="blockList">
  3554. <li class="blockList">
  3555. <h4>CALIB_CB_NORMALIZE_IMAGE</h4>
  3556. <pre>public static final&nbsp;int CALIB_CB_NORMALIZE_IMAGE</pre>
  3557. <dl>
  3558. <dt><span class="seeLabel">See Also:</span></dt>
  3559. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_CB_NORMALIZE_IMAGE">Constant Field Values</a></dd>
  3560. </dl>
  3561. </li>
  3562. </ul>
  3563. <a name="CALIB_CB_SYMMETRIC_GRID">
  3564. <!-- -->
  3565. </a>
  3566. <ul class="blockList">
  3567. <li class="blockList">
  3568. <h4>CALIB_CB_SYMMETRIC_GRID</h4>
  3569. <pre>public static final&nbsp;int CALIB_CB_SYMMETRIC_GRID</pre>
  3570. <dl>
  3571. <dt><span class="seeLabel">See Also:</span></dt>
  3572. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_CB_SYMMETRIC_GRID">Constant Field Values</a></dd>
  3573. </dl>
  3574. </li>
  3575. </ul>
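<div class="block">A minimal sketch of how the CALIB_CB_* detection flags above are typically combined. <code>grayImage</code> is an assumed 8-bit chessboard photo, the 9x6 pattern size is arbitrary, and the second call assumes the sector-based <code>findChessboardCornersSB</code> overload is available in this build.</div>
<pre>
// Classic detector: adaptive thresholding + normalization, with a fast pre-check.
MatOfPoint2f corners = new MatOfPoint2f();
boolean found = Calib3d.findChessboardCorners(grayImage, new Size(9, 6), corners,
        Calib3d.CALIB_CB_ADAPTIVE_THRESH
      | Calib3d.CALIB_CB_NORMALIZE_IMAGE
      | Calib3d.CALIB_CB_FAST_CHECK);

// Sector-based detector: CALIB_CB_EXHAUSTIVE and CALIB_CB_ACCURACY apply here.
Mat cornersSB = new Mat();
boolean foundSB = Calib3d.findChessboardCornersSB(grayImage, new Size(9, 6), cornersSB,
        Calib3d.CALIB_CB_EXHAUSTIVE | Calib3d.CALIB_CB_ACCURACY);
</pre>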
  3576. <a name="CALIB_FIX_ASPECT_RATIO">
  3577. <!-- -->
  3578. </a>
  3579. <ul class="blockList">
  3580. <li class="blockList">
  3581. <h4>CALIB_FIX_ASPECT_RATIO</h4>
  3582. <pre>public static final&nbsp;int CALIB_FIX_ASPECT_RATIO</pre>
  3583. <dl>
  3584. <dt><span class="seeLabel">See Also:</span></dt>
  3585. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_FIX_ASPECT_RATIO">Constant Field Values</a></dd>
  3586. </dl>
  3587. </li>
  3588. </ul>
  3589. <a name="CALIB_FIX_FOCAL_LENGTH">
  3590. <!-- -->
  3591. </a>
  3592. <ul class="blockList">
  3593. <li class="blockList">
  3594. <h4>CALIB_FIX_FOCAL_LENGTH</h4>
  3595. <pre>public static final&nbsp;int CALIB_FIX_FOCAL_LENGTH</pre>
  3596. <dl>
  3597. <dt><span class="seeLabel">See Also:</span></dt>
  3598. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_FIX_FOCAL_LENGTH">Constant Field Values</a></dd>
  3599. </dl>
  3600. </li>
  3601. </ul>
  3602. <a name="CALIB_FIX_INTRINSIC">
  3603. <!-- -->
  3604. </a>
  3605. <ul class="blockList">
  3606. <li class="blockList">
  3607. <h4>CALIB_FIX_INTRINSIC</h4>
  3608. <pre>public static final&nbsp;int CALIB_FIX_INTRINSIC</pre>
  3609. <dl>
  3610. <dt><span class="seeLabel">See Also:</span></dt>
  3611. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_FIX_INTRINSIC">Constant Field Values</a></dd>
  3612. </dl>
  3613. </li>
  3614. </ul>
  3615. <a name="CALIB_FIX_K1">
  3616. <!-- -->
  3617. </a>
  3618. <ul class="blockList">
  3619. <li class="blockList">
  3620. <h4>CALIB_FIX_K1</h4>
  3621. <pre>public static final&nbsp;int CALIB_FIX_K1</pre>
  3622. <dl>
  3623. <dt><span class="seeLabel">See Also:</span></dt>
  3624. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_FIX_K1">Constant Field Values</a></dd>
  3625. </dl>
  3626. </li>
  3627. </ul>
  3628. <a name="CALIB_FIX_K2">
  3629. <!-- -->
  3630. </a>
  3631. <ul class="blockList">
  3632. <li class="blockList">
  3633. <h4>CALIB_FIX_K2</h4>
  3634. <pre>public static final&nbsp;int CALIB_FIX_K2</pre>
  3635. <dl>
  3636. <dt><span class="seeLabel">See Also:</span></dt>
  3637. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_FIX_K2">Constant Field Values</a></dd>
  3638. </dl>
  3639. </li>
  3640. </ul>
  3641. <a name="CALIB_FIX_K3">
  3642. <!-- -->
  3643. </a>
  3644. <ul class="blockList">
  3645. <li class="blockList">
  3646. <h4>CALIB_FIX_K3</h4>
  3647. <pre>public static final&nbsp;int CALIB_FIX_K3</pre>
  3648. <dl>
  3649. <dt><span class="seeLabel">See Also:</span></dt>
  3650. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_FIX_K3">Constant Field Values</a></dd>
  3651. </dl>
  3652. </li>
  3653. </ul>
  3654. <a name="CALIB_FIX_K4">
  3655. <!-- -->
  3656. </a>
  3657. <ul class="blockList">
  3658. <li class="blockList">
  3659. <h4>CALIB_FIX_K4</h4>
  3660. <pre>public static final&nbsp;int CALIB_FIX_K4</pre>
  3661. <dl>
  3662. <dt><span class="seeLabel">See Also:</span></dt>
  3663. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_FIX_K4">Constant Field Values</a></dd>
  3664. </dl>
  3665. </li>
  3666. </ul>
  3667. <a name="CALIB_FIX_K5">
  3668. <!-- -->
  3669. </a>
  3670. <ul class="blockList">
  3671. <li class="blockList">
  3672. <h4>CALIB_FIX_K5</h4>
  3673. <pre>public static final&nbsp;int CALIB_FIX_K5</pre>
  3674. <dl>
  3675. <dt><span class="seeLabel">See Also:</span></dt>
  3676. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_FIX_K5">Constant Field Values</a></dd>
  3677. </dl>
  3678. </li>
  3679. </ul>
  3680. <a name="CALIB_FIX_K6">
  3681. <!-- -->
  3682. </a>
  3683. <ul class="blockList">
  3684. <li class="blockList">
  3685. <h4>CALIB_FIX_K6</h4>
  3686. <pre>public static final&nbsp;int CALIB_FIX_K6</pre>
  3687. <dl>
  3688. <dt><span class="seeLabel">See Also:</span></dt>
  3689. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_FIX_K6">Constant Field Values</a></dd>
  3690. </dl>
  3691. </li>
  3692. </ul>
  3693. <a name="CALIB_FIX_PRINCIPAL_POINT">
  3694. <!-- -->
  3695. </a>
  3696. <ul class="blockList">
  3697. <li class="blockList">
  3698. <h4>CALIB_FIX_PRINCIPAL_POINT</h4>
  3699. <pre>public static final&nbsp;int CALIB_FIX_PRINCIPAL_POINT</pre>
  3700. <dl>
  3701. <dt><span class="seeLabel">See Also:</span></dt>
  3702. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_FIX_PRINCIPAL_POINT">Constant Field Values</a></dd>
  3703. </dl>
  3704. </li>
  3705. </ul>
  3706. <a name="CALIB_FIX_S1_S2_S3_S4">
  3707. <!-- -->
  3708. </a>
  3709. <ul class="blockList">
  3710. <li class="blockList">
  3711. <h4>CALIB_FIX_S1_S2_S3_S4</h4>
  3712. <pre>public static final&nbsp;int CALIB_FIX_S1_S2_S3_S4</pre>
  3713. <dl>
  3714. <dt><span class="seeLabel">See Also:</span></dt>
  3715. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_FIX_S1_S2_S3_S4">Constant Field Values</a></dd>
  3716. </dl>
  3717. </li>
  3718. </ul>
  3719. <a name="CALIB_FIX_TANGENT_DIST">
  3720. <!-- -->
  3721. </a>
  3722. <ul class="blockList">
  3723. <li class="blockList">
  3724. <h4>CALIB_FIX_TANGENT_DIST</h4>
  3725. <pre>public static final&nbsp;int CALIB_FIX_TANGENT_DIST</pre>
  3726. <dl>
  3727. <dt><span class="seeLabel">See Also:</span></dt>
  3728. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_FIX_TANGENT_DIST">Constant Field Values</a></dd>
  3729. </dl>
  3730. </li>
  3731. </ul>
  3732. <a name="CALIB_FIX_TAUX_TAUY">
  3733. <!-- -->
  3734. </a>
  3735. <ul class="blockList">
  3736. <li class="blockList">
  3737. <h4>CALIB_FIX_TAUX_TAUY</h4>
  3738. <pre>public static final&nbsp;int CALIB_FIX_TAUX_TAUY</pre>
  3739. <dl>
  3740. <dt><span class="seeLabel">See Also:</span></dt>
  3741. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_FIX_TAUX_TAUY">Constant Field Values</a></dd>
  3742. </dl>
  3743. </li>
  3744. </ul>
  3745. <a name="CALIB_HAND_EYE_ANDREFF">
  3746. <!-- -->
  3747. </a>
  3748. <ul class="blockList">
  3749. <li class="blockList">
  3750. <h4>CALIB_HAND_EYE_ANDREFF</h4>
  3751. <pre>public static final&nbsp;int CALIB_HAND_EYE_ANDREFF</pre>
  3752. <dl>
  3753. <dt><span class="seeLabel">See Also:</span></dt>
  3754. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_HAND_EYE_ANDREFF">Constant Field Values</a></dd>
  3755. </dl>
  3756. </li>
  3757. </ul>
  3758. <a name="CALIB_HAND_EYE_DANIILIDIS">
  3759. <!-- -->
  3760. </a>
  3761. <ul class="blockList">
  3762. <li class="blockList">
  3763. <h4>CALIB_HAND_EYE_DANIILIDIS</h4>
  3764. <pre>public static final&nbsp;int CALIB_HAND_EYE_DANIILIDIS</pre>
  3765. <dl>
  3766. <dt><span class="seeLabel">See Also:</span></dt>
  3767. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_HAND_EYE_DANIILIDIS">Constant Field Values</a></dd>
  3768. </dl>
  3769. </li>
  3770. </ul>
  3771. <a name="CALIB_HAND_EYE_HORAUD">
  3772. <!-- -->
  3773. </a>
  3774. <ul class="blockList">
  3775. <li class="blockList">
  3776. <h4>CALIB_HAND_EYE_HORAUD</h4>
  3777. <pre>public static final&nbsp;int CALIB_HAND_EYE_HORAUD</pre>
  3778. <dl>
  3779. <dt><span class="seeLabel">See Also:</span></dt>
  3780. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_HAND_EYE_HORAUD">Constant Field Values</a></dd>
  3781. </dl>
  3782. </li>
  3783. </ul>
  3784. <a name="CALIB_HAND_EYE_PARK">
  3785. <!-- -->
  3786. </a>
  3787. <ul class="blockList">
  3788. <li class="blockList">
  3789. <h4>CALIB_HAND_EYE_PARK</h4>
  3790. <pre>public static final&nbsp;int CALIB_HAND_EYE_PARK</pre>
  3791. <dl>
  3792. <dt><span class="seeLabel">See Also:</span></dt>
  3793. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_HAND_EYE_PARK">Constant Field Values</a></dd>
  3794. </dl>
  3795. </li>
  3796. </ul>
  3797. <a name="CALIB_HAND_EYE_TSAI">
  3798. <!-- -->
  3799. </a>
  3800. <ul class="blockList">
  3801. <li class="blockList">
  3802. <h4>CALIB_HAND_EYE_TSAI</h4>
  3803. <pre>public static final&nbsp;int CALIB_HAND_EYE_TSAI</pre>
  3804. <dl>
  3805. <dt><span class="seeLabel">See Also:</span></dt>
  3806. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_HAND_EYE_TSAI">Constant Field Values</a></dd>
  3807. </dl>
  3808. </li>
  3809. </ul>
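<div class="block">The CALIB_HAND_EYE_* constants above select the solver used by <code>calibrateHandEye</code>. A hedged sketch, assuming the per-pose rotation and translation lists (gripper-to-base and target-to-camera) have already been collected as <code>java.util.List&lt;Mat&gt;</code> inputs:</div>
<pre>
// rGripper2Base, tGripper2Base, rTarget2Cam, tTarget2Cam: assumed List&lt;Mat&gt;, one entry per robot pose.
Mat R_cam2gripper = new Mat();
Mat t_cam2gripper = new Mat();
Calib3d.calibrateHandEye(rGripper2Base, tGripper2Base,
        rTarget2Cam, tTarget2Cam,
        R_cam2gripper, t_cam2gripper,
        Calib3d.CALIB_HAND_EYE_TSAI);
</pre>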
  3810. <a name="CALIB_NINTRINSIC">
  3811. <!-- -->
  3812. </a>
  3813. <ul class="blockList">
  3814. <li class="blockList">
  3815. <h4>CALIB_NINTRINSIC</h4>
  3816. <pre>public static final&nbsp;int CALIB_NINTRINSIC</pre>
  3817. <dl>
  3818. <dt><span class="seeLabel">See Also:</span></dt>
  3819. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_NINTRINSIC">Constant Field Values</a></dd>
  3820. </dl>
  3821. </li>
  3822. </ul>
  3823. <a name="CALIB_RATIONAL_MODEL">
  3824. <!-- -->
  3825. </a>
  3826. <ul class="blockList">
  3827. <li class="blockList">
  3828. <h4>CALIB_RATIONAL_MODEL</h4>
  3829. <pre>public static final&nbsp;int CALIB_RATIONAL_MODEL</pre>
  3830. <dl>
  3831. <dt><span class="seeLabel">See Also:</span></dt>
  3832. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_RATIONAL_MODEL">Constant Field Values</a></dd>
  3833. </dl>
  3834. </li>
  3835. </ul>
  3836. <a name="CALIB_ROBOT_WORLD_HAND_EYE_LI">
  3837. <!-- -->
  3838. </a>
  3839. <ul class="blockList">
  3840. <li class="blockList">
  3841. <h4>CALIB_ROBOT_WORLD_HAND_EYE_LI</h4>
  3842. <pre>public static final&nbsp;int CALIB_ROBOT_WORLD_HAND_EYE_LI</pre>
  3843. <dl>
  3844. <dt><span class="seeLabel">See Also:</span></dt>
  3845. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_ROBOT_WORLD_HAND_EYE_LI">Constant Field Values</a></dd>
  3846. </dl>
  3847. </li>
  3848. </ul>
  3849. <a name="CALIB_ROBOT_WORLD_HAND_EYE_SHAH">
  3850. <!-- -->
  3851. </a>
  3852. <ul class="blockList">
  3853. <li class="blockList">
  3854. <h4>CALIB_ROBOT_WORLD_HAND_EYE_SHAH</h4>
  3855. <pre>public static final&nbsp;int CALIB_ROBOT_WORLD_HAND_EYE_SHAH</pre>
  3856. <dl>
  3857. <dt><span class="seeLabel">See Also:</span></dt>
  3858. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_ROBOT_WORLD_HAND_EYE_SHAH">Constant Field Values</a></dd>
  3859. </dl>
  3860. </li>
  3861. </ul>
  3862. <a name="CALIB_SAME_FOCAL_LENGTH">
  3863. <!-- -->
  3864. </a>
  3865. <ul class="blockList">
  3866. <li class="blockList">
  3867. <h4>CALIB_SAME_FOCAL_LENGTH</h4>
  3868. <pre>public static final&nbsp;int CALIB_SAME_FOCAL_LENGTH</pre>
  3869. <dl>
  3870. <dt><span class="seeLabel">See Also:</span></dt>
  3871. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_SAME_FOCAL_LENGTH">Constant Field Values</a></dd>
  3872. </dl>
  3873. </li>
  3874. </ul>
  3875. <a name="CALIB_THIN_PRISM_MODEL">
  3876. <!-- -->
  3877. </a>
  3878. <ul class="blockList">
  3879. <li class="blockList">
  3880. <h4>CALIB_THIN_PRISM_MODEL</h4>
  3881. <pre>public static final&nbsp;int CALIB_THIN_PRISM_MODEL</pre>
  3882. <dl>
  3883. <dt><span class="seeLabel">See Also:</span></dt>
  3884. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_THIN_PRISM_MODEL">Constant Field Values</a></dd>
  3885. </dl>
  3886. </li>
  3887. </ul>
  3888. <a name="CALIB_TILTED_MODEL">
  3889. <!-- -->
  3890. </a>
  3891. <ul class="blockList">
  3892. <li class="blockList">
  3893. <h4>CALIB_TILTED_MODEL</h4>
  3894. <pre>public static final&nbsp;int CALIB_TILTED_MODEL</pre>
  3895. <dl>
  3896. <dt><span class="seeLabel">See Also:</span></dt>
  3897. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_TILTED_MODEL">Constant Field Values</a></dd>
  3898. </dl>
  3899. </li>
  3900. </ul>
  3901. <a name="CALIB_USE_EXTRINSIC_GUESS">
  3902. <!-- -->
  3903. </a>
  3904. <ul class="blockList">
  3905. <li class="blockList">
  3906. <h4>CALIB_USE_EXTRINSIC_GUESS</h4>
  3907. <pre>public static final&nbsp;int CALIB_USE_EXTRINSIC_GUESS</pre>
  3908. <dl>
  3909. <dt><span class="seeLabel">See Also:</span></dt>
  3910. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_USE_EXTRINSIC_GUESS">Constant Field Values</a></dd>
  3911. </dl>
  3912. </li>
  3913. </ul>
  3914. <a name="CALIB_USE_INTRINSIC_GUESS">
  3915. <!-- -->
  3916. </a>
  3917. <ul class="blockList">
  3918. <li class="blockList">
  3919. <h4>CALIB_USE_INTRINSIC_GUESS</h4>
  3920. <pre>public static final&nbsp;int CALIB_USE_INTRINSIC_GUESS</pre>
  3921. <dl>
  3922. <dt><span class="seeLabel">See Also:</span></dt>
  3923. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_USE_INTRINSIC_GUESS">Constant Field Values</a></dd>
  3924. </dl>
  3925. </li>
  3926. </ul>
  3927. <a name="CALIB_USE_LU">
  3928. <!-- -->
  3929. </a>
  3930. <ul class="blockList">
  3931. <li class="blockList">
  3932. <h4>CALIB_USE_LU</h4>
  3933. <pre>public static final&nbsp;int CALIB_USE_LU</pre>
  3934. <dl>
  3935. <dt><span class="seeLabel">See Also:</span></dt>
  3936. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_USE_LU">Constant Field Values</a></dd>
  3937. </dl>
  3938. </li>
  3939. </ul>
  3940. <a name="CALIB_USE_QR">
  3941. <!-- -->
  3942. </a>
  3943. <ul class="blockList">
  3944. <li class="blockList">
  3945. <h4>CALIB_USE_QR</h4>
  3946. <pre>public static final&nbsp;int CALIB_USE_QR</pre>
  3947. <dl>
  3948. <dt><span class="seeLabel">See Also:</span></dt>
  3949. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_USE_QR">Constant Field Values</a></dd>
  3950. </dl>
  3951. </li>
  3952. </ul>
  3953. <a name="CALIB_ZERO_DISPARITY">
  3954. <!-- -->
  3955. </a>
  3956. <ul class="blockList">
  3957. <li class="blockList">
  3958. <h4>CALIB_ZERO_DISPARITY</h4>
  3959. <pre>public static final&nbsp;int CALIB_ZERO_DISPARITY</pre>
  3960. <dl>
  3961. <dt><span class="seeLabel">See Also:</span></dt>
  3962. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_ZERO_DISPARITY">Constant Field Values</a></dd>
  3963. </dl>
  3964. </li>
  3965. </ul>
  3966. <a name="CALIB_ZERO_TANGENT_DIST">
  3967. <!-- -->
  3968. </a>
  3969. <ul class="blockList">
  3970. <li class="blockList">
  3971. <h4>CALIB_ZERO_TANGENT_DIST</h4>
  3972. <pre>public static final&nbsp;int CALIB_ZERO_TANGENT_DIST</pre>
  3973. <dl>
  3974. <dt><span class="seeLabel">See Also:</span></dt>
  3975. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CALIB_ZERO_TANGENT_DIST">Constant Field Values</a></dd>
  3976. </dl>
  3977. </li>
  3978. </ul>
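<div class="block">A minimal sketch of passing several of the CALIB_* flags above to <code>calibrateCamera</code>. <code>objectPoints</code> and <code>imagePoints</code> are assumed to be <code>java.util.List&lt;Mat&gt;</code> collections gathered from pattern detections, <code>imageSize</code> the sensor resolution, and the <code>java.util</code> imports are assumed.</div>
<pre>
// cameraMatrix starts as identity; CALIB_FIX_ASPECT_RATIO keeps its fx/fy ratio during optimization.
Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64F);
Mat distCoeffs = new Mat();
List&lt;Mat&gt; rvecs = new ArrayList&lt;&gt;();
List&lt;Mat&gt; tvecs = new ArrayList&lt;&gt;();
double rms = Calib3d.calibrateCamera(objectPoints, imagePoints, imageSize,
        cameraMatrix, distCoeffs, rvecs, tvecs,
        Calib3d.CALIB_FIX_ASPECT_RATIO
      | Calib3d.CALIB_ZERO_TANGENT_DIST
      | Calib3d.CALIB_FIX_K3);
</pre>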
  3979. <a name="CirclesGridFinderParameters_ASYMMETRIC_GRID">
  3980. <!-- -->
  3981. </a>
  3982. <ul class="blockList">
  3983. <li class="blockList">
  3984. <h4>CirclesGridFinderParameters_ASYMMETRIC_GRID</h4>
  3985. <pre>public static final&nbsp;int CirclesGridFinderParameters_ASYMMETRIC_GRID</pre>
  3986. <dl>
  3987. <dt><span class="seeLabel">See Also:</span></dt>
  3988. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CirclesGridFinderParameters_ASYMMETRIC_GRID">Constant Field Values</a></dd>
  3989. </dl>
  3990. </li>
  3991. </ul>
  3992. <a name="CirclesGridFinderParameters_SYMMETRIC_GRID">
  3993. <!-- -->
  3994. </a>
  3995. <ul class="blockList">
  3996. <li class="blockList">
  3997. <h4>CirclesGridFinderParameters_SYMMETRIC_GRID</h4>
  3998. <pre>public static final&nbsp;int CirclesGridFinderParameters_SYMMETRIC_GRID</pre>
  3999. <dl>
  4000. <dt><span class="seeLabel">See Also:</span></dt>
  4001. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CirclesGridFinderParameters_SYMMETRIC_GRID">Constant Field Values</a></dd>
  4002. </dl>
  4003. </li>
  4004. </ul>
  4005. <a name="COV_POLISHER">
  4006. <!-- -->
  4007. </a>
  4008. <ul class="blockList">
  4009. <li class="blockList">
  4010. <h4>COV_POLISHER</h4>
  4011. <pre>public static final&nbsp;int COV_POLISHER</pre>
  4012. <dl>
  4013. <dt><span class="seeLabel">See Also:</span></dt>
  4014. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.COV_POLISHER">Constant Field Values</a></dd>
  4015. </dl>
  4016. </li>
  4017. </ul>
  4018. <a name="CV_DLS">
  4019. <!-- -->
  4020. </a>
  4021. <ul class="blockList">
  4022. <li class="blockList">
  4023. <h4>CV_DLS</h4>
  4024. <pre>public static final&nbsp;int CV_DLS</pre>
  4025. <dl>
  4026. <dt><span class="seeLabel">See Also:</span></dt>
  4027. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CV_DLS">Constant Field Values</a></dd>
  4028. </dl>
  4029. </li>
  4030. </ul>
  4031. <a name="CV_EPNP">
  4032. <!-- -->
  4033. </a>
  4034. <ul class="blockList">
  4035. <li class="blockList">
  4036. <h4>CV_EPNP</h4>
  4037. <pre>public static final&nbsp;int CV_EPNP</pre>
  4038. <dl>
  4039. <dt><span class="seeLabel">See Also:</span></dt>
  4040. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CV_EPNP">Constant Field Values</a></dd>
  4041. </dl>
  4042. </li>
  4043. </ul>
  4044. <a name="CV_ITERATIVE">
  4045. <!-- -->
  4046. </a>
  4047. <ul class="blockList">
  4048. <li class="blockList">
  4049. <h4>CV_ITERATIVE</h4>
  4050. <pre>public static final&nbsp;int CV_ITERATIVE</pre>
  4051. <dl>
  4052. <dt><span class="seeLabel">See Also:</span></dt>
  4053. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CV_ITERATIVE">Constant Field Values</a></dd>
  4054. </dl>
  4055. </li>
  4056. </ul>
  4057. <a name="CV_P3P">
  4058. <!-- -->
  4059. </a>
  4060. <ul class="blockList">
  4061. <li class="blockList">
  4062. <h4>CV_P3P</h4>
  4063. <pre>public static final&nbsp;int CV_P3P</pre>
  4064. <dl>
  4065. <dt><span class="seeLabel">See Also:</span></dt>
  4066. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CV_P3P">Constant Field Values</a></dd>
  4067. </dl>
  4068. </li>
  4069. </ul>
  4070. <a name="CvLevMarq_CALC_J">
  4071. <!-- -->
  4072. </a>
  4073. <ul class="blockList">
  4074. <li class="blockList">
  4075. <h4>CvLevMarq_CALC_J</h4>
  4076. <pre>public static final&nbsp;int CvLevMarq_CALC_J</pre>
  4077. <dl>
  4078. <dt><span class="seeLabel">See Also:</span></dt>
  4079. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CvLevMarq_CALC_J">Constant Field Values</a></dd>
  4080. </dl>
  4081. </li>
  4082. </ul>
  4083. <a name="CvLevMarq_CHECK_ERR">
  4084. <!-- -->
  4085. </a>
  4086. <ul class="blockList">
  4087. <li class="blockList">
  4088. <h4>CvLevMarq_CHECK_ERR</h4>
  4089. <pre>public static final&nbsp;int CvLevMarq_CHECK_ERR</pre>
  4090. <dl>
  4091. <dt><span class="seeLabel">See Also:</span></dt>
  4092. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CvLevMarq_CHECK_ERR">Constant Field Values</a></dd>
  4093. </dl>
  4094. </li>
  4095. </ul>
  4096. <a name="CvLevMarq_DONE">
  4097. <!-- -->
  4098. </a>
  4099. <ul class="blockList">
  4100. <li class="blockList">
  4101. <h4>CvLevMarq_DONE</h4>
  4102. <pre>public static final&nbsp;int CvLevMarq_DONE</pre>
  4103. <dl>
  4104. <dt><span class="seeLabel">See Also:</span></dt>
  4105. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CvLevMarq_DONE">Constant Field Values</a></dd>
  4106. </dl>
  4107. </li>
  4108. </ul>
  4109. <a name="CvLevMarq_STARTED">
  4110. <!-- -->
  4111. </a>
  4112. <ul class="blockList">
  4113. <li class="blockList">
  4114. <h4>CvLevMarq_STARTED</h4>
  4115. <pre>public static final&nbsp;int CvLevMarq_STARTED</pre>
  4116. <dl>
  4117. <dt><span class="seeLabel">See Also:</span></dt>
  4118. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.CvLevMarq_STARTED">Constant Field Values</a></dd>
  4119. </dl>
  4120. </li>
  4121. </ul>
  4122. <a name="fisheye_CALIB_CHECK_COND">
  4123. <!-- -->
  4124. </a>
  4125. <ul class="blockList">
  4126. <li class="blockList">
  4127. <h4>fisheye_CALIB_CHECK_COND</h4>
  4128. <pre>public static final&nbsp;int fisheye_CALIB_CHECK_COND</pre>
  4129. <dl>
  4130. <dt><span class="seeLabel">See Also:</span></dt>
  4131. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.fisheye_CALIB_CHECK_COND">Constant Field Values</a></dd>
  4132. </dl>
  4133. </li>
  4134. </ul>
  4135. <a name="fisheye_CALIB_FIX_FOCAL_LENGTH">
  4136. <!-- -->
  4137. </a>
  4138. <ul class="blockList">
  4139. <li class="blockList">
  4140. <h4>fisheye_CALIB_FIX_FOCAL_LENGTH</h4>
  4141. <pre>public static final&nbsp;int fisheye_CALIB_FIX_FOCAL_LENGTH</pre>
  4142. <dl>
  4143. <dt><span class="seeLabel">See Also:</span></dt>
  4144. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.fisheye_CALIB_FIX_FOCAL_LENGTH">Constant Field Values</a></dd>
  4145. </dl>
  4146. </li>
  4147. </ul>
  4148. <a name="fisheye_CALIB_FIX_INTRINSIC">
  4149. <!-- -->
  4150. </a>
  4151. <ul class="blockList">
  4152. <li class="blockList">
  4153. <h4>fisheye_CALIB_FIX_INTRINSIC</h4>
  4154. <pre>public static final&nbsp;int fisheye_CALIB_FIX_INTRINSIC</pre>
  4155. <dl>
  4156. <dt><span class="seeLabel">See Also:</span></dt>
  4157. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.fisheye_CALIB_FIX_INTRINSIC">Constant Field Values</a></dd>
  4158. </dl>
  4159. </li>
  4160. </ul>
  4161. <a name="fisheye_CALIB_FIX_K1">
  4162. <!-- -->
  4163. </a>
  4164. <ul class="blockList">
  4165. <li class="blockList">
  4166. <h4>fisheye_CALIB_FIX_K1</h4>
  4167. <pre>public static final&nbsp;int fisheye_CALIB_FIX_K1</pre>
  4168. <dl>
  4169. <dt><span class="seeLabel">See Also:</span></dt>
  4170. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.fisheye_CALIB_FIX_K1">Constant Field Values</a></dd>
  4171. </dl>
  4172. </li>
  4173. </ul>
  4174. <a name="fisheye_CALIB_FIX_K2">
  4175. <!-- -->
  4176. </a>
  4177. <ul class="blockList">
  4178. <li class="blockList">
  4179. <h4>fisheye_CALIB_FIX_K2</h4>
  4180. <pre>public static final&nbsp;int fisheye_CALIB_FIX_K2</pre>
  4181. <dl>
  4182. <dt><span class="seeLabel">See Also:</span></dt>
  4183. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.fisheye_CALIB_FIX_K2">Constant Field Values</a></dd>
  4184. </dl>
  4185. </li>
  4186. </ul>
  4187. <a name="fisheye_CALIB_FIX_K3">
  4188. <!-- -->
  4189. </a>
  4190. <ul class="blockList">
  4191. <li class="blockList">
  4192. <h4>fisheye_CALIB_FIX_K3</h4>
  4193. <pre>public static final&nbsp;int fisheye_CALIB_FIX_K3</pre>
  4194. <dl>
  4195. <dt><span class="seeLabel">See Also:</span></dt>
  4196. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.fisheye_CALIB_FIX_K3">Constant Field Values</a></dd>
  4197. </dl>
  4198. </li>
  4199. </ul>
  4200. <a name="fisheye_CALIB_FIX_K4">
  4201. <!-- -->
  4202. </a>
  4203. <ul class="blockList">
  4204. <li class="blockList">
  4205. <h4>fisheye_CALIB_FIX_K4</h4>
  4206. <pre>public static final&nbsp;int fisheye_CALIB_FIX_K4</pre>
  4207. <dl>
  4208. <dt><span class="seeLabel">See Also:</span></dt>
  4209. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.fisheye_CALIB_FIX_K4">Constant Field Values</a></dd>
  4210. </dl>
  4211. </li>
  4212. </ul>
  4213. <a name="fisheye_CALIB_FIX_PRINCIPAL_POINT">
  4214. <!-- -->
  4215. </a>
  4216. <ul class="blockList">
  4217. <li class="blockList">
  4218. <h4>fisheye_CALIB_FIX_PRINCIPAL_POINT</h4>
  4219. <pre>public static final&nbsp;int fisheye_CALIB_FIX_PRINCIPAL_POINT</pre>
  4220. <dl>
  4221. <dt><span class="seeLabel">See Also:</span></dt>
  4222. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.fisheye_CALIB_FIX_PRINCIPAL_POINT">Constant Field Values</a></dd>
  4223. </dl>
  4224. </li>
  4225. </ul>
  4226. <a name="fisheye_CALIB_FIX_SKEW">
  4227. <!-- -->
  4228. </a>
  4229. <ul class="blockList">
  4230. <li class="blockList">
  4231. <h4>fisheye_CALIB_FIX_SKEW</h4>
  4232. <pre>public static final&nbsp;int fisheye_CALIB_FIX_SKEW</pre>
  4233. <dl>
  4234. <dt><span class="seeLabel">See Also:</span></dt>
  4235. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.fisheye_CALIB_FIX_SKEW">Constant Field Values</a></dd>
  4236. </dl>
  4237. </li>
  4238. </ul>
  4239. <a name="fisheye_CALIB_RECOMPUTE_EXTRINSIC">
  4240. <!-- -->
  4241. </a>
  4242. <ul class="blockList">
  4243. <li class="blockList">
  4244. <h4>fisheye_CALIB_RECOMPUTE_EXTRINSIC</h4>
  4245. <pre>public static final&nbsp;int fisheye_CALIB_RECOMPUTE_EXTRINSIC</pre>
  4246. <dl>
  4247. <dt><span class="seeLabel">See Also:</span></dt>
  4248. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.fisheye_CALIB_RECOMPUTE_EXTRINSIC">Constant Field Values</a></dd>
  4249. </dl>
  4250. </li>
  4251. </ul>
  4252. <a name="fisheye_CALIB_USE_INTRINSIC_GUESS">
  4253. <!-- -->
  4254. </a>
  4255. <ul class="blockList">
  4256. <li class="blockList">
  4257. <h4>fisheye_CALIB_USE_INTRINSIC_GUESS</h4>
  4258. <pre>public static final&nbsp;int fisheye_CALIB_USE_INTRINSIC_GUESS</pre>
  4259. <dl>
  4260. <dt><span class="seeLabel">See Also:</span></dt>
  4261. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.fisheye_CALIB_USE_INTRINSIC_GUESS">Constant Field Values</a></dd>
  4262. </dl>
  4263. </li>
  4264. </ul>
  4265. <a name="fisheye_CALIB_ZERO_DISPARITY">
  4266. <!-- -->
  4267. </a>
  4268. <ul class="blockList">
  4269. <li class="blockList">
  4270. <h4>fisheye_CALIB_ZERO_DISPARITY</h4>
  4271. <pre>public static final&nbsp;int fisheye_CALIB_ZERO_DISPARITY</pre>
  4272. <dl>
  4273. <dt><span class="seeLabel">See Also:</span></dt>
  4274. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.fisheye_CALIB_ZERO_DISPARITY">Constant Field Values</a></dd>
  4275. </dl>
  4276. </li>
  4277. </ul>
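<div class="block">A hedged sketch of feeding the fisheye_CALIB_* flags above to the fisheye calibration entry point (exposed in these bindings as <code>fisheye_calibrate</code>). <code>objectPoints</code>, <code>imagePoints</code> and <code>imageSize</code> are assumed as in the pinhole example.</div>
<pre>
Mat K = new Mat();   // 3x3 fisheye camera matrix (output)
Mat D = new Mat();   // 4x1 distortion coefficients k1..k4 (output)
List&lt;Mat&gt; rvecs = new ArrayList&lt;&gt;();
List&lt;Mat&gt; tvecs = new ArrayList&lt;&gt;();
double rms = Calib3d.fisheye_calibrate(objectPoints, imagePoints, imageSize,
        K, D, rvecs, tvecs,
        Calib3d.fisheye_CALIB_RECOMPUTE_EXTRINSIC
      | Calib3d.fisheye_CALIB_FIX_SKEW);
</pre>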
  4278. <a name="FM_7POINT">
  4279. <!-- -->
  4280. </a>
  4281. <ul class="blockList">
  4282. <li class="blockList">
  4283. <h4>FM_7POINT</h4>
  4284. <pre>public static final&nbsp;int FM_7POINT</pre>
  4285. <dl>
  4286. <dt><span class="seeLabel">See Also:</span></dt>
  4287. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.FM_7POINT">Constant Field Values</a></dd>
  4288. </dl>
  4289. </li>
  4290. </ul>
  4291. <a name="FM_8POINT">
  4292. <!-- -->
  4293. </a>
  4294. <ul class="blockList">
  4295. <li class="blockList">
  4296. <h4>FM_8POINT</h4>
  4297. <pre>public static final&nbsp;int FM_8POINT</pre>
  4298. <dl>
  4299. <dt><span class="seeLabel">See Also:</span></dt>
  4300. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.FM_8POINT">Constant Field Values</a></dd>
  4301. </dl>
  4302. </li>
  4303. </ul>
  4304. <a name="FM_LMEDS">
  4305. <!-- -->
  4306. </a>
  4307. <ul class="blockList">
  4308. <li class="blockList">
  4309. <h4>FM_LMEDS</h4>
  4310. <pre>public static final&nbsp;int FM_LMEDS</pre>
  4311. <dl>
  4312. <dt><span class="seeLabel">See Also:</span></dt>
  4313. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.FM_LMEDS">Constant Field Values</a></dd>
  4314. </dl>
  4315. </li>
  4316. </ul>
  4317. <a name="FM_RANSAC">
  4318. <!-- -->
  4319. </a>
  4320. <ul class="blockList">
  4321. <li class="blockList">
  4322. <h4>FM_RANSAC</h4>
  4323. <pre>public static final&nbsp;int FM_RANSAC</pre>
  4324. <dl>
  4325. <dt><span class="seeLabel">See Also:</span></dt>
  4326. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.FM_RANSAC">Constant Field Values</a></dd>
  4327. </dl>
  4328. </li>
  4329. </ul>
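<div class="block">The FM_* constants above are the method selectors for <code>findFundamentalMat</code>. A minimal sketch, assuming <code>points1</code> and <code>points2</code> are matched pixel coordinates (<code>MatOfPoint2f</code>) from two views:</div>
<pre>
// Robust estimation with RANSAC; threshold in pixels, confidence in (0, 1).
Mat F = Calib3d.findFundamentalMat(points1, points2,
        Calib3d.FM_RANSAC, 3.0, 0.99);
</pre>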
  4330. <a name="LMEDS">
  4331. <!-- -->
  4332. </a>
  4333. <ul class="blockList">
  4334. <li class="blockList">
  4335. <h4>LMEDS</h4>
  4336. <pre>public static final&nbsp;int LMEDS</pre>
  4337. <dl>
  4338. <dt><span class="seeLabel">See Also:</span></dt>
  4339. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.LMEDS">Constant Field Values</a></dd>
  4340. </dl>
  4341. </li>
  4342. </ul>
  4343. <a name="LOCAL_OPTIM_GC">
  4344. <!-- -->
  4345. </a>
  4346. <ul class="blockList">
  4347. <li class="blockList">
  4348. <h4>LOCAL_OPTIM_GC</h4>
  4349. <pre>public static final&nbsp;int LOCAL_OPTIM_GC</pre>
  4350. <dl>
  4351. <dt><span class="seeLabel">See Also:</span></dt>
  4352. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.LOCAL_OPTIM_GC">Constant Field Values</a></dd>
  4353. </dl>
  4354. </li>
  4355. </ul>
  4356. <a name="LOCAL_OPTIM_INNER_AND_ITER_LO">
  4357. <!-- -->
  4358. </a>
  4359. <ul class="blockList">
  4360. <li class="blockList">
  4361. <h4>LOCAL_OPTIM_INNER_AND_ITER_LO</h4>
  4362. <pre>public static final&nbsp;int LOCAL_OPTIM_INNER_AND_ITER_LO</pre>
  4363. <dl>
  4364. <dt><span class="seeLabel">See Also:</span></dt>
  4365. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.LOCAL_OPTIM_INNER_AND_ITER_LO">Constant Field Values</a></dd>
  4366. </dl>
  4367. </li>
  4368. </ul>
  4369. <a name="LOCAL_OPTIM_INNER_LO">
  4370. <!-- -->
  4371. </a>
  4372. <ul class="blockList">
  4373. <li class="blockList">
  4374. <h4>LOCAL_OPTIM_INNER_LO</h4>
  4375. <pre>public static final&nbsp;int LOCAL_OPTIM_INNER_LO</pre>
  4376. <dl>
  4377. <dt><span class="seeLabel">See Also:</span></dt>
  4378. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.LOCAL_OPTIM_INNER_LO">Constant Field Values</a></dd>
  4379. </dl>
  4380. </li>
  4381. </ul>
  4382. <a name="LOCAL_OPTIM_NULL">
  4383. <!-- -->
  4384. </a>
  4385. <ul class="blockList">
  4386. <li class="blockList">
  4387. <h4>LOCAL_OPTIM_NULL</h4>
  4388. <pre>public static final&nbsp;int LOCAL_OPTIM_NULL</pre>
  4389. <dl>
  4390. <dt><span class="seeLabel">See Also:</span></dt>
  4391. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.LOCAL_OPTIM_NULL">Constant Field Values</a></dd>
  4392. </dl>
  4393. </li>
  4394. </ul>
  4395. <a name="LOCAL_OPTIM_SIGMA">
  4396. <!-- -->
  4397. </a>
  4398. <ul class="blockList">
  4399. <li class="blockList">
  4400. <h4>LOCAL_OPTIM_SIGMA</h4>
  4401. <pre>public static final&nbsp;int LOCAL_OPTIM_SIGMA</pre>
  4402. <dl>
  4403. <dt><span class="seeLabel">See Also:</span></dt>
  4404. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.LOCAL_OPTIM_SIGMA">Constant Field Values</a></dd>
  4405. </dl>
  4406. </li>
  4407. </ul>
  4408. <a name="LSQ_POLISHER">
  4409. <!-- -->
  4410. </a>
  4411. <ul class="blockList">
  4412. <li class="blockList">
  4413. <h4>LSQ_POLISHER</h4>
  4414. <pre>public static final&nbsp;int LSQ_POLISHER</pre>
  4415. <dl>
  4416. <dt><span class="seeLabel">See Also:</span></dt>
  4417. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.LSQ_POLISHER">Constant Field Values</a></dd>
  4418. </dl>
  4419. </li>
  4420. </ul>
  4421. <a name="MAGSAC">
  4422. <!-- -->
  4423. </a>
  4424. <ul class="blockList">
  4425. <li class="blockList">
  4426. <h4>MAGSAC</h4>
  4427. <pre>public static final&nbsp;int MAGSAC</pre>
  4428. <dl>
  4429. <dt><span class="seeLabel">See Also:</span></dt>
  4430. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.MAGSAC">Constant Field Values</a></dd>
  4431. </dl>
  4432. </li>
  4433. </ul>
  4434. <a name="NEIGH_FLANN_KNN">
  4435. <!-- -->
  4436. </a>
  4437. <ul class="blockList">
  4438. <li class="blockList">
  4439. <h4>NEIGH_FLANN_KNN</h4>
  4440. <pre>public static final&nbsp;int NEIGH_FLANN_KNN</pre>
  4441. <dl>
  4442. <dt><span class="seeLabel">See Also:</span></dt>
  4443. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.NEIGH_FLANN_KNN">Constant Field Values</a></dd>
  4444. </dl>
  4445. </li>
  4446. </ul>
  4447. <a name="NEIGH_FLANN_RADIUS">
  4448. <!-- -->
  4449. </a>
  4450. <ul class="blockList">
  4451. <li class="blockList">
  4452. <h4>NEIGH_FLANN_RADIUS</h4>
  4453. <pre>public static final&nbsp;int NEIGH_FLANN_RADIUS</pre>
  4454. <dl>
  4455. <dt><span class="seeLabel">See Also:</span></dt>
  4456. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.NEIGH_FLANN_RADIUS">Constant Field Values</a></dd>
  4457. </dl>
  4458. </li>
  4459. </ul>
  4460. <a name="NEIGH_GRID">
  4461. <!-- -->
  4462. </a>
  4463. <ul class="blockList">
  4464. <li class="blockList">
  4465. <h4>NEIGH_GRID</h4>
  4466. <pre>public static final&nbsp;int NEIGH_GRID</pre>
  4467. <dl>
  4468. <dt><span class="seeLabel">See Also:</span></dt>
  4469. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.NEIGH_GRID">Constant Field Values</a></dd>
  4470. </dl>
  4471. </li>
  4472. </ul>
  4473. <a name="NONE_POLISHER">
  4474. <!-- -->
  4475. </a>
  4476. <ul class="blockList">
  4477. <li class="blockList">
  4478. <h4>NONE_POLISHER</h4>
  4479. <pre>public static final&nbsp;int NONE_POLISHER</pre>
  4480. <dl>
  4481. <dt><span class="seeLabel">See Also:</span></dt>
  4482. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.NONE_POLISHER">Constant Field Values</a></dd>
  4483. </dl>
  4484. </li>
  4485. </ul>
  4486. <a name="PROJ_SPHERICAL_EQRECT">
  4487. <!-- -->
  4488. </a>
  4489. <ul class="blockList">
  4490. <li class="blockList">
  4491. <h4>PROJ_SPHERICAL_EQRECT</h4>
  4492. <pre>public static final&nbsp;int PROJ_SPHERICAL_EQRECT</pre>
  4493. <dl>
  4494. <dt><span class="seeLabel">See Also:</span></dt>
  4495. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.PROJ_SPHERICAL_EQRECT">Constant Field Values</a></dd>
  4496. </dl>
  4497. </li>
  4498. </ul>
  4499. <a name="PROJ_SPHERICAL_ORTHO">
  4500. <!-- -->
  4501. </a>
  4502. <ul class="blockList">
  4503. <li class="blockList">
  4504. <h4>PROJ_SPHERICAL_ORTHO</h4>
  4505. <pre>public static final&nbsp;int PROJ_SPHERICAL_ORTHO</pre>
  4506. <dl>
  4507. <dt><span class="seeLabel">See Also:</span></dt>
  4508. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.PROJ_SPHERICAL_ORTHO">Constant Field Values</a></dd>
  4509. </dl>
  4510. </li>
  4511. </ul>
  4512. <a name="RANSAC">
  4513. <!-- -->
  4514. </a>
  4515. <ul class="blockList">
  4516. <li class="blockList">
  4517. <h4>RANSAC</h4>
  4518. <pre>public static final&nbsp;int RANSAC</pre>
  4519. <dl>
  4520. <dt><span class="seeLabel">See Also:</span></dt>
  4521. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.RANSAC">Constant Field Values</a></dd>
  4522. </dl>
  4523. </li>
  4524. </ul>
  4525. <a name="RHO">
  4526. <!-- -->
  4527. </a>
  4528. <ul class="blockList">
  4529. <li class="blockList">
  4530. <h4>RHO</h4>
  4531. <pre>public static final&nbsp;int RHO</pre>
  4532. <dl>
  4533. <dt><span class="seeLabel">See Also:</span></dt>
  4534. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.RHO">Constant Field Values</a></dd>
  4535. </dl>
  4536. </li>
  4537. </ul>
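<div class="block">RANSAC, LMEDS and RHO (together with the USAC-related constants in this list) are the robust-estimation method codes accepted by functions such as <code>findHomography</code>. A minimal sketch, assuming <code>srcPoints</code> and <code>dstPoints</code> are matched planar points (<code>MatOfPoint2f</code>):</div>
<pre>
// RANSAC with a 3-pixel reprojection threshold.
Mat H = Calib3d.findHomography(srcPoints, dstPoints, Calib3d.RANSAC, 3.0);

// LMEDS ignores the threshold argument; RHO is a PROSAC-based alternative.
Mat H2 = Calib3d.findHomography(srcPoints, dstPoints, Calib3d.LMEDS, 3.0);
</pre>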
  4538. <a name="SAMPLING_NAPSAC">
  4539. <!-- -->
  4540. </a>
  4541. <ul class="blockList">
  4542. <li class="blockList">
  4543. <h4>SAMPLING_NAPSAC</h4>
  4544. <pre>public static final&nbsp;int SAMPLING_NAPSAC</pre>
  4545. <dl>
  4546. <dt><span class="seeLabel">See Also:</span></dt>
  4547. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SAMPLING_NAPSAC">Constant Field Values</a></dd>
  4548. </dl>
  4549. </li>
  4550. </ul>
  4551. <a name="SAMPLING_PROGRESSIVE_NAPSAC">
  4552. <!-- -->
  4553. </a>
  4554. <ul class="blockList">
  4555. <li class="blockList">
  4556. <h4>SAMPLING_PROGRESSIVE_NAPSAC</h4>
  4557. <pre>public static final&nbsp;int SAMPLING_PROGRESSIVE_NAPSAC</pre>
  4558. <dl>
  4559. <dt><span class="seeLabel">See Also:</span></dt>
  4560. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SAMPLING_PROGRESSIVE_NAPSAC">Constant Field Values</a></dd>
  4561. </dl>
  4562. </li>
  4563. </ul>
  4564. <a name="SAMPLING_PROSAC">
  4565. <!-- -->
  4566. </a>
  4567. <ul class="blockList">
  4568. <li class="blockList">
  4569. <h4>SAMPLING_PROSAC</h4>
  4570. <pre>public static final&nbsp;int SAMPLING_PROSAC</pre>
  4571. <dl>
  4572. <dt><span class="seeLabel">See Also:</span></dt>
  4573. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SAMPLING_PROSAC">Constant Field Values</a></dd>
  4574. </dl>
  4575. </li>
  4576. </ul>
  4577. <a name="SAMPLING_UNIFORM">
  4578. <!-- -->
  4579. </a>
  4580. <ul class="blockList">
  4581. <li class="blockList">
  4582. <h4>SAMPLING_UNIFORM</h4>
  4583. <pre>public static final&nbsp;int SAMPLING_UNIFORM</pre>
  4584. <dl>
  4585. <dt><span class="seeLabel">See Also:</span></dt>
  4586. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SAMPLING_UNIFORM">Constant Field Values</a></dd>
  4587. </dl>
  4588. </li>
  4589. </ul>
  4590. <a name="SCORE_METHOD_LMEDS">
  4591. <!-- -->
  4592. </a>
  4593. <ul class="blockList">
  4594. <li class="blockList">
  4595. <h4>SCORE_METHOD_LMEDS</h4>
  4596. <pre>public static final&nbsp;int SCORE_METHOD_LMEDS</pre>
  4597. <dl>
  4598. <dt><span class="seeLabel">See Also:</span></dt>
  4599. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SCORE_METHOD_LMEDS">Constant Field Values</a></dd>
  4600. </dl>
  4601. </li>
  4602. </ul>
  4603. <a name="SCORE_METHOD_MAGSAC">
  4604. <!-- -->
  4605. </a>
  4606. <ul class="blockList">
  4607. <li class="blockList">
  4608. <h4>SCORE_METHOD_MAGSAC</h4>
  4609. <pre>public static final&nbsp;int SCORE_METHOD_MAGSAC</pre>
  4610. <dl>
  4611. <dt><span class="seeLabel">See Also:</span></dt>
  4612. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SCORE_METHOD_MAGSAC">Constant Field Values</a></dd>
  4613. </dl>
  4614. </li>
  4615. </ul>
  4616. <a name="SCORE_METHOD_MSAC">
  4617. <!-- -->
  4618. </a>
  4619. <ul class="blockList">
  4620. <li class="blockList">
  4621. <h4>SCORE_METHOD_MSAC</h4>
  4622. <pre>public static final&nbsp;int SCORE_METHOD_MSAC</pre>
  4623. <dl>
  4624. <dt><span class="seeLabel">See Also:</span></dt>
  4625. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SCORE_METHOD_MSAC">Constant Field Values</a></dd>
  4626. </dl>
  4627. </li>
  4628. </ul>
  4629. <a name="SCORE_METHOD_RANSAC">
  4630. <!-- -->
  4631. </a>
  4632. <ul class="blockList">
  4633. <li class="blockList">
  4634. <h4>SCORE_METHOD_RANSAC</h4>
  4635. <pre>public static final&nbsp;int SCORE_METHOD_RANSAC</pre>
  4636. <dl>
  4637. <dt><span class="seeLabel">See Also:</span></dt>
  4638. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SCORE_METHOD_RANSAC">Constant Field Values</a></dd>
  4639. </dl>
  4640. </li>
  4641. </ul>
  4642. <a name="SOLVEPNP_AP3P">
  4643. <!-- -->
  4644. </a>
  4645. <ul class="blockList">
  4646. <li class="blockList">
  4647. <h4>SOLVEPNP_AP3P</h4>
  4648. <pre>public static final&nbsp;int SOLVEPNP_AP3P</pre>
  4649. <dl>
  4650. <dt><span class="seeLabel">See Also:</span></dt>
  4651. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SOLVEPNP_AP3P">Constant Field Values</a></dd>
  4652. </dl>
  4653. </li>
  4654. </ul>
  4655. <a name="SOLVEPNP_DLS">
  4656. <!-- -->
  4657. </a>
  4658. <ul class="blockList">
  4659. <li class="blockList">
  4660. <h4>SOLVEPNP_DLS</h4>
  4661. <pre>public static final&nbsp;int SOLVEPNP_DLS</pre>
  4662. <dl>
  4663. <dt><span class="seeLabel">See Also:</span></dt>
  4664. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SOLVEPNP_DLS">Constant Field Values</a></dd>
  4665. </dl>
  4666. </li>
  4667. </ul>
  4668. <a name="SOLVEPNP_EPNP">
  4669. <!-- -->
  4670. </a>
  4671. <ul class="blockList">
  4672. <li class="blockList">
  4673. <h4>SOLVEPNP_EPNP</h4>
  4674. <pre>public static final&nbsp;int SOLVEPNP_EPNP</pre>
  4675. <dl>
  4676. <dt><span class="seeLabel">See Also:</span></dt>
  4677. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SOLVEPNP_EPNP">Constant Field Values</a></dd>
  4678. </dl>
  4679. </li>
  4680. </ul>
  4681. <a name="SOLVEPNP_IPPE">
  4682. <!-- -->
  4683. </a>
  4684. <ul class="blockList">
  4685. <li class="blockList">
  4686. <h4>SOLVEPNP_IPPE</h4>
  4687. <pre>public static final&nbsp;int SOLVEPNP_IPPE</pre>
  4688. <dl>
  4689. <dt><span class="seeLabel">See Also:</span></dt>
  4690. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SOLVEPNP_IPPE">Constant Field Values</a></dd>
  4691. </dl>
  4692. </li>
  4693. </ul>
  4694. <a name="SOLVEPNP_IPPE_SQUARE">
  4695. <!-- -->
  4696. </a>
  4697. <ul class="blockList">
  4698. <li class="blockList">
  4699. <h4>SOLVEPNP_IPPE_SQUARE</h4>
  4700. <pre>public static final&nbsp;int SOLVEPNP_IPPE_SQUARE</pre>
  4701. <dl>
  4702. <dt><span class="seeLabel">See Also:</span></dt>
  4703. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SOLVEPNP_IPPE_SQUARE">Constant Field Values</a></dd>
  4704. </dl>
  4705. </li>
  4706. </ul>
  4707. <a name="SOLVEPNP_ITERATIVE">
  4708. <!-- -->
  4709. </a>
  4710. <ul class="blockList">
  4711. <li class="blockList">
  4712. <h4>SOLVEPNP_ITERATIVE</h4>
  4713. <pre>public static final&nbsp;int SOLVEPNP_ITERATIVE</pre>
  4714. <dl>
  4715. <dt><span class="seeLabel">See Also:</span></dt>
  4716. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SOLVEPNP_ITERATIVE">Constant Field Values</a></dd>
  4717. </dl>
  4718. </li>
  4719. </ul>
  4720. <a name="SOLVEPNP_MAX_COUNT">
  4721. <!-- -->
  4722. </a>
  4723. <ul class="blockList">
  4724. <li class="blockList">
  4725. <h4>SOLVEPNP_MAX_COUNT</h4>
  4726. <pre>public static final&nbsp;int SOLVEPNP_MAX_COUNT</pre>
  4727. <dl>
  4728. <dt><span class="seeLabel">See Also:</span></dt>
  4729. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SOLVEPNP_MAX_COUNT">Constant Field Values</a></dd>
  4730. </dl>
  4731. </li>
  4732. </ul>
  4733. <a name="SOLVEPNP_P3P">
  4734. <!-- -->
  4735. </a>
  4736. <ul class="blockList">
  4737. <li class="blockList">
  4738. <h4>SOLVEPNP_P3P</h4>
  4739. <pre>public static final&nbsp;int SOLVEPNP_P3P</pre>
  4740. <dl>
  4741. <dt><span class="seeLabel">See Also:</span></dt>
  4742. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SOLVEPNP_P3P">Constant Field Values</a></dd>
  4743. </dl>
  4744. </li>
  4745. </ul>
  4746. <a name="SOLVEPNP_SQPNP">
  4747. <!-- -->
  4748. </a>
  4749. <ul class="blockList">
  4750. <li class="blockList">
  4751. <h4>SOLVEPNP_SQPNP</h4>
  4752. <pre>public static final&nbsp;int SOLVEPNP_SQPNP</pre>
  4753. <dl>
  4754. <dt><span class="seeLabel">See Also:</span></dt>
  4755. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SOLVEPNP_SQPNP">Constant Field Values</a></dd>
  4756. </dl>
  4757. </li>
  4758. </ul>
  4759. <a name="SOLVEPNP_UPNP">
  4760. <!-- -->
  4761. </a>
  4762. <ul class="blockList">
  4763. <li class="blockList">
  4764. <h4>SOLVEPNP_UPNP</h4>
  4765. <pre>public static final&nbsp;int SOLVEPNP_UPNP</pre>
  4766. <dl>
  4767. <dt><span class="seeLabel">See Also:</span></dt>
  4768. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.SOLVEPNP_UPNP">Constant Field Values</a></dd>
  4769. </dl>
  4770. </li>
  4771. </ul>
  4772. <a name="USAC_ACCURATE">
  4773. <!-- -->
  4774. </a>
  4775. <ul class="blockList">
  4776. <li class="blockList">
  4777. <h4>USAC_ACCURATE</h4>
  4778. <pre>public static final&nbsp;int USAC_ACCURATE</pre>
  4779. <dl>
  4780. <dt><span class="seeLabel">See Also:</span></dt>
  4781. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.USAC_ACCURATE">Constant Field Values</a></dd>
  4782. </dl>
  4783. </li>
  4784. </ul>
  4785. <a name="USAC_DEFAULT">
  4786. <!-- -->
  4787. </a>
  4788. <ul class="blockList">
  4789. <li class="blockList">
  4790. <h4>USAC_DEFAULT</h4>
  4791. <pre>public static final&nbsp;int USAC_DEFAULT</pre>
  4792. <dl>
  4793. <dt><span class="seeLabel">See Also:</span></dt>
  4794. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.USAC_DEFAULT">Constant Field Values</a></dd>
  4795. </dl>
  4796. </li>
  4797. </ul>
  4798. <a name="USAC_FAST">
  4799. <!-- -->
  4800. </a>
  4801. <ul class="blockList">
  4802. <li class="blockList">
  4803. <h4>USAC_FAST</h4>
  4804. <pre>public static final&nbsp;int USAC_FAST</pre>
  4805. <dl>
  4806. <dt><span class="seeLabel">See Also:</span></dt>
  4807. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.USAC_FAST">Constant Field Values</a></dd>
  4808. </dl>
  4809. </li>
  4810. </ul>
  4811. <a name="USAC_FM_8PTS">
  4812. <!-- -->
  4813. </a>
  4814. <ul class="blockList">
  4815. <li class="blockList">
  4816. <h4>USAC_FM_8PTS</h4>
  4817. <pre>public static final&nbsp;int USAC_FM_8PTS</pre>
  4818. <dl>
  4819. <dt><span class="seeLabel">See Also:</span></dt>
  4820. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.USAC_FM_8PTS">Constant Field Values</a></dd>
  4821. </dl>
  4822. </li>
  4823. </ul>
  4824. <a name="USAC_MAGSAC">
  4825. <!-- -->
  4826. </a>
  4827. <ul class="blockList">
  4828. <li class="blockList">
  4829. <h4>USAC_MAGSAC</h4>
  4830. <pre>public static final&nbsp;int USAC_MAGSAC</pre>
  4831. <dl>
  4832. <dt><span class="seeLabel">See Also:</span></dt>
  4833. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.USAC_MAGSAC">Constant Field Values</a></dd>
  4834. </dl>
  4835. </li>
  4836. </ul>
  4837. <a name="USAC_PARALLEL">
  4838. <!-- -->
  4839. </a>
  4840. <ul class="blockList">
  4841. <li class="blockList">
  4842. <h4>USAC_PARALLEL</h4>
  4843. <pre>public static final&nbsp;int USAC_PARALLEL</pre>
  4844. <dl>
  4845. <dt><span class="seeLabel">See Also:</span></dt>
  4846. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.USAC_PARALLEL">Constant Field Values</a></dd>
  4847. </dl>
  4848. </li>
  4849. </ul>
  4850. <a name="USAC_PROSAC">
  4851. <!-- -->
  4852. </a>
  4853. <ul class="blockListLast">
  4854. <li class="blockList">
  4855. <h4>USAC_PROSAC</h4>
  4856. <pre>public static final&nbsp;int USAC_PROSAC</pre>
  4857. <dl>
  4858. <dt><span class="seeLabel">See Also:</span></dt>
  4859. <dd><a href="../../../constant-values.html#org.opencv.calib3d.Calib3d.USAC_PROSAC">Constant Field Values</a></dd>
  4860. </dl>
  4861. </li>
  4862. </ul>
  4863. </li>
  4864. </ul>
  4865. <!-- ========= CONSTRUCTOR DETAIL ======== -->
  4866. <ul class="blockList">
  4867. <li class="blockList"><a name="constructor.detail">
  4868. <!-- -->
  4869. </a>
  4870. <h3>Constructor Detail</h3>
  4871. <a name="Calib3d--">
  4872. <!-- -->
  4873. </a>
  4874. <ul class="blockListLast">
  4875. <li class="blockList">
  4876. <h4>Calib3d</h4>
  4877. <pre>public&nbsp;Calib3d()</pre>
  4878. </li>
  4879. </ul>
  4880. </li>
  4881. </ul>
  4882. <!-- ============ METHOD DETAIL ========== -->
  4883. <ul class="blockList">
  4884. <li class="blockList"><a name="method.detail">
  4885. <!-- -->
  4886. </a>
  4887. <h3>Method Detail</h3>
  4888. <a name="calibrateCamera-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-">
  4889. <!-- -->
  4890. </a>
  4891. <ul class="blockList">
  4892. <li class="blockList">
  4893. <h4>calibrateCamera</h4>
  4894. <pre>public static&nbsp;double&nbsp;calibrateCamera(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  4895. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  4896. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  4897. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  4898. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  4899. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  4900. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs)</pre>
  4901. </li>
  4902. </ul>
  4903. <a name="calibrateCamera-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-int-">
  4904. <!-- -->
  4905. </a>
  4906. <ul class="blockList">
  4907. <li class="blockList">
  4908. <h4>calibrateCamera</h4>
  4909. <pre>public static&nbsp;double&nbsp;calibrateCamera(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  4910. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  4911. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  4912. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  4913. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  4914. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  4915. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  4916. int&nbsp;flags)</pre>
  4917. </li>
  4918. </ul>
  4919. <a name="calibrateCamera-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-int-org.opencv.core.TermCriteria-">
  4920. <!-- -->
  4921. </a>
  4922. <ul class="blockList">
  4923. <li class="blockList">
  4924. <h4>calibrateCamera</h4>
  4925. <pre>public static&nbsp;double&nbsp;calibrateCamera(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  4926. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  4927. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  4928. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  4929. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  4930. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  4931. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  4932. int&nbsp;flags,
  4933. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</pre>
  4934. </li>
  4935. </ul>
  4936. <a name="calibrateCameraExtended-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  4937. <!-- -->
  4938. </a>
  4939. <ul class="blockList">
  4940. <li class="blockList">
  4941. <h4>calibrateCameraExtended</h4>
  4942. <pre>public static&nbsp;double&nbsp;calibrateCameraExtended(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  4943. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  4944. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  4945. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  4946. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  4947. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  4948. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  4949. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsIntrinsics,
  4950. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsExtrinsics,
  4951. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors)</pre>
  4952. <div class="block">Finds the camera intrinsic and extrinsic parameters from several views of a calibration
  4953. pattern.</div>
  4954. <dl>
  4955. <dt><span class="paramLabel">Parameters:</span></dt>
  4956. <dd><code>objectPoints</code> - In the new interface it is a vector of vectors of calibration pattern points in
  4957. the calibration pattern coordinate space (e.g. std::vector&lt;std::vector&lt;cv::Vec3f&gt;&gt;). The outer
  4958. vector contains as many elements as the number of pattern views. If the same calibration pattern
4959. is shown in each view and it is fully visible, all the vectors will be the same. However, it is also
4960. possible to use partially occluded patterns or even different patterns in different views; in that case,
4961. the vectors will differ. Although the points are 3D, they all lie in the calibration pattern's
  4962. XY coordinate plane (thus 0 in the Z-coordinate), if the used calibration pattern is a planar rig.
  4963. In the old interface all the vectors of object points from different views are concatenated
  4964. together.</dd>
  4965. <dd><code>imagePoints</code> - In the new interface it is a vector of vectors of the projections of calibration
  4966. pattern points (e.g. std::vector&lt;std::vector&lt;cv::Vec2f&gt;&gt;). imagePoints.size() and
  4967. objectPoints.size(), and imagePoints[i].size() and objectPoints[i].size() for each i, must be equal,
4968. respectively. In the old interface all the vectors of image points from different views are
4969. concatenated together.</dd>
  4970. <dd><code>imageSize</code> - Size of the image used only to initialize the camera intrinsic matrix.</dd>
  4971. <dd><code>cameraMatrix</code> - Input/output 3x3 floating-point camera intrinsic matrix
  4972. \(\cameramatrix{A}\) . If REF: CALIB_USE_INTRINSIC_GUESS
  4973. and/or REF: CALIB_FIX_ASPECT_RATIO, REF: CALIB_FIX_PRINCIPAL_POINT or REF: CALIB_FIX_FOCAL_LENGTH
  4974. are specified, some or all of fx, fy, cx, cy must be initialized before calling the function.</dd>
  4975. <dd><code>distCoeffs</code> - Input/output vector of distortion coefficients
  4976. \(\distcoeffs\).</dd>
  4977. <dd><code>rvecs</code> - Output vector of rotation vectors (REF: Rodrigues ) estimated for each pattern view
  4978. (e.g. std::vector&lt;cv::Mat&gt;&gt;). That is, each i-th rotation vector together with the corresponding
  4979. i-th translation vector (see the next output parameter description) brings the calibration pattern
  4980. from the object coordinate space (in which object points are specified) to the camera coordinate
  4981. space. In more technical terms, the tuple of the i-th rotation and translation vector performs
  4982. a change of basis from object coordinate space to camera coordinate space. Due to its duality, this
  4983. tuple is equivalent to the position of the calibration pattern with respect to the camera coordinate
  4984. space.</dd>
  4985. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view, see parameter
4986. description above.</dd>
  4987. <dd><code>stdDeviationsIntrinsics</code> - Output vector of standard deviations estimated for intrinsic
  4988. parameters. Order of deviations values:
  4989. \((f_x, f_y, c_x, c_y, k_1, k_2, p_1, p_2, k_3, k_4, k_5, k_6 , s_1, s_2, s_3,
4990. s_4, \tau_x, \tau_y)\) If one of the parameters is not estimated, its deviation is equal to zero.</dd>
  4991. <dd><code>stdDeviationsExtrinsics</code> - Output vector of standard deviations estimated for extrinsic
  4992. parameters. Order of deviations values: \((R_0, T_0, \dotsc , R_{M - 1}, T_{M - 1})\) where M is
  4993. the number of pattern views. \(R_i, T_i\) are concatenated 1x3 vectors.</dd>
4994. <dd><code>perViewErrors</code> - Output vector of the RMS re-projection error estimated for each pattern view.
4995. The following flags, which can be passed to the overloads of this method that accept a <code>flags</code> argument, affect the calibration: <ul>
  4996. <li>
  4997. REF: CALIB_USE_INTRINSIC_GUESS cameraMatrix contains valid initial values of
  4998. fx, fy, cx, cy that are optimized further. Otherwise, (cx, cy) is initially set to the image
  4999. center ( imageSize is used), and focal distances are computed in a least-squares fashion.
5000. Note that if the intrinsic parameters are known, there is no need to use this function just to
  5001. estimate extrinsic parameters. Use REF: solvePnP instead.
  5002. </li>
  5003. <li>
  5004. REF: CALIB_FIX_PRINCIPAL_POINT The principal point is not changed during the global
  5005. optimization. It stays at the center or at a different location specified when
  5006. REF: CALIB_USE_INTRINSIC_GUESS is set too.
  5007. </li>
  5008. <li>
  5009. REF: CALIB_FIX_ASPECT_RATIO The functions consider only fy as a free parameter. The
  5010. ratio fx/fy stays the same as in the input cameraMatrix . When
  5011. REF: CALIB_USE_INTRINSIC_GUESS is not set, the actual input values of fx and fy are
  5012. ignored, only their ratio is computed and used further.
  5013. </li>
  5014. <li>
  5015. REF: CALIB_ZERO_TANGENT_DIST Tangential distortion coefficients \((p_1, p_2)\) are set
  5016. to zeros and stay zero.
  5017. </li>
  5018. <li>
  5019. REF: CALIB_FIX_FOCAL_LENGTH The focal length is not changed during the global optimization if
  5020. REF: CALIB_USE_INTRINSIC_GUESS is set.
  5021. </li>
  5022. <li>
  5023. REF: CALIB_FIX_K1,..., REF: CALIB_FIX_K6 The corresponding radial distortion
  5024. coefficient is not changed during the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is
  5025. set, the coefficient from the supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  5026. </li>
  5027. <li>
  5028. REF: CALIB_RATIONAL_MODEL Coefficients k4, k5, and k6 are enabled. To provide the
  5029. backward compatibility, this extra flag should be explicitly specified to make the
  5030. calibration function use the rational model and return 8 coefficients or more.
  5031. </li>
  5032. <li>
  5033. REF: CALIB_THIN_PRISM_MODEL Coefficients s1, s2, s3 and s4 are enabled. To provide the
  5034. backward compatibility, this extra flag should be explicitly specified to make the
  5035. calibration function use the thin prism model and return 12 coefficients or more.
  5036. </li>
  5037. <li>
  5038. REF: CALIB_FIX_S1_S2_S3_S4 The thin prism distortion coefficients are not changed during
  5039. the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
  5040. supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  5041. </li>
  5042. <li>
  5043. REF: CALIB_TILTED_MODEL Coefficients tauX and tauY are enabled. To provide the
  5044. backward compatibility, this extra flag should be explicitly specified to make the
  5045. calibration function use the tilted sensor model and return 14 coefficients.
  5046. </li>
  5047. <li>
  5048. REF: CALIB_FIX_TAUX_TAUY The coefficients of the tilted sensor model are not changed during
  5049. the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
  5050. supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  5051. </li>
  5052. </ul></dd>
  5053. <dt><span class="returnLabel">Returns:</span></dt>
  5054. <dd>the overall RMS re-projection error.
  5055. The function estimates the intrinsic camera parameters and extrinsic parameters for each of the
  5056. views. The algorithm is based on CITE: Zhang2000 and CITE: BouguetMCT . The coordinates of 3D object
  5057. points and their corresponding 2D projections in each view must be specified. That may be achieved
  5058. by using an object with known geometry and easily detectable feature points. Such an object is
  5059. called a calibration rig or calibration pattern, and OpenCV has built-in support for a chessboard as
  5060. a calibration rig (see REF: findChessboardCorners). Currently, initialization of intrinsic
  5061. parameters (when REF: CALIB_USE_INTRINSIC_GUESS is not set) is only implemented for planar calibration
  5062. patterns (where Z-coordinates of the object points must be all zeros). 3D calibration rigs can also
  5063. be used as long as initial cameraMatrix is provided.
  5064. The algorithm performs the following steps:
  5065. <ul>
  5066. <li>
  5067. Compute the initial intrinsic parameters (the option only available for planar calibration
  5068. patterns) or read them from the input parameters. The distortion coefficients are all set to
  5069. zeros initially unless some of CALIB_FIX_K? are specified.
  5070. </li>
  5071. </ul>
  5072. <ul>
  5073. <li>
  5074. Estimate the initial camera pose as if the intrinsic parameters have been already known. This is
  5075. done using REF: solvePnP .
  5076. </li>
  5077. </ul>
  5078. <ul>
  5079. <li>
  5080. Run the global Levenberg-Marquardt optimization algorithm to minimize the reprojection error,
  5081. that is, the total sum of squared distances between the observed feature points imagePoints and
  5082. the projected (using the current estimates for camera parameters and the poses) object points
  5083. objectPoints. See REF: projectPoints for details.
  5084. </li>
  5085. </ul>
  5086. <b>Note:</b>
  5087. If you use a non-square (i.e. non-N-by-N) grid and REF: findChessboardCorners for calibration,
  5088. and REF: calibrateCamera returns bad values (zero distortion coefficients, \(c_x\) and
  5089. \(c_y\) very far from the image center, and/or large differences between \(f_x\) and
  5090. \(f_y\) (ratios of 10:1 or more)), then you are probably using patternSize=cvSize(rows,cols)
  5091. instead of using patternSize=cvSize(cols,rows) in REF: findChessboardCorners.
  5092. SEE:
  5093. calibrateCameraRO, findChessboardCorners, solvePnP, initCameraMatrix2D, stereoCalibrate,
  5094. undistort</dd>
  5095. </dl>
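<div class="block"><b>Example (illustrative):</b> a minimal Java sketch of calling this overload; it is not part of the upstream
OpenCV reference. The class name, image size, and the assumption that <code>objectPoints</code> and <code>imagePoints</code>
have already been filled for several views (e.g. with <code>findChessboardCorners</code>) are assumptions made only for this sketch.</div>
<pre>
import java.util.ArrayList;
import java.util.List;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Size;

public class CalibrateExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // One Mat of 3D pattern points and one Mat of detected 2D corners per view.
        // Filling them (e.g. via Calib3d.findChessboardCorners) is omitted in this sketch.
        List&lt;Mat&gt; objectPoints = new ArrayList&lt;&gt;();
        List&lt;Mat&gt; imagePoints  = new ArrayList&lt;&gt;();

        Size imageSize = new Size(640, 480);   // assumed resolution of the calibration images
        Mat cameraMatrix = new Mat();
        Mat distCoeffs   = new Mat();
        List&lt;Mat&gt; rvecs = new ArrayList&lt;&gt;();
        List&lt;Mat&gt; tvecs = new ArrayList&lt;&gt;();
        Mat stdDevIntrinsics = new Mat();
        Mat stdDevExtrinsics = new Mat();
        Mat perViewErrors    = new Mat();

        double rms = Calib3d.calibrateCameraExtended(objectPoints, imagePoints, imageSize,
                cameraMatrix, distCoeffs, rvecs, tvecs,
                stdDevIntrinsics, stdDevExtrinsics, perViewErrors);
        System.out.println("Overall RMS re-projection error: " + rms);
    }
}
</pre>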
  5096. </li>
  5097. </ul>
  5098. <a name="calibrateCameraExtended-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">
  5099. <!-- -->
  5100. </a>
  5101. <ul class="blockList">
  5102. <li class="blockList">
  5103. <h4>calibrateCameraExtended</h4>
  5104. <pre>public static&nbsp;double&nbsp;calibrateCameraExtended(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  5105. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  5106. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  5107. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  5108. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  5109. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  5110. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  5111. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsIntrinsics,
  5112. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsExtrinsics,
  5113. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
  5114. int&nbsp;flags)</pre>
  5115. <div class="block">Finds the camera intrinsic and extrinsic parameters from several views of a calibration
  5116. pattern.</div>
  5117. <dl>
  5118. <dt><span class="paramLabel">Parameters:</span></dt>
  5119. <dd><code>objectPoints</code> - In the new interface it is a vector of vectors of calibration pattern points in
  5120. the calibration pattern coordinate space (e.g. std::vector&lt;std::vector&lt;cv::Vec3f&gt;&gt;). The outer
  5121. vector contains as many elements as the number of pattern views. If the same calibration pattern
5122. is shown in each view and it is fully visible, all the vectors will be the same. However, it is also
5123. possible to use partially occluded patterns or even different patterns in different views; in that case,
5124. the vectors will differ. Although the points are 3D, they all lie in the calibration pattern's
  5125. XY coordinate plane (thus 0 in the Z-coordinate), if the used calibration pattern is a planar rig.
  5126. In the old interface all the vectors of object points from different views are concatenated
  5127. together.</dd>
  5128. <dd><code>imagePoints</code> - In the new interface it is a vector of vectors of the projections of calibration
  5129. pattern points (e.g. std::vector&lt;std::vector&lt;cv::Vec2f&gt;&gt;). imagePoints.size() and
  5130. objectPoints.size(), and imagePoints[i].size() and objectPoints[i].size() for each i, must be equal,
5131. respectively. In the old interface all the vectors of image points from different views are
5132. concatenated together.</dd>
  5133. <dd><code>imageSize</code> - Size of the image used only to initialize the camera intrinsic matrix.</dd>
  5134. <dd><code>cameraMatrix</code> - Input/output 3x3 floating-point camera intrinsic matrix
  5135. \(\cameramatrix{A}\) . If REF: CALIB_USE_INTRINSIC_GUESS
  5136. and/or REF: CALIB_FIX_ASPECT_RATIO, REF: CALIB_FIX_PRINCIPAL_POINT or REF: CALIB_FIX_FOCAL_LENGTH
  5137. are specified, some or all of fx, fy, cx, cy must be initialized before calling the function.</dd>
  5138. <dd><code>distCoeffs</code> - Input/output vector of distortion coefficients
  5139. \(\distcoeffs\).</dd>
  5140. <dd><code>rvecs</code> - Output vector of rotation vectors (REF: Rodrigues ) estimated for each pattern view
  5141. (e.g. std::vector&lt;cv::Mat&gt;&gt;). That is, each i-th rotation vector together with the corresponding
  5142. i-th translation vector (see the next output parameter description) brings the calibration pattern
  5143. from the object coordinate space (in which object points are specified) to the camera coordinate
  5144. space. In more technical terms, the tuple of the i-th rotation and translation vector performs
  5145. a change of basis from object coordinate space to camera coordinate space. Due to its duality, this
  5146. tuple is equivalent to the position of the calibration pattern with respect to the camera coordinate
  5147. space.</dd>
  5148. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view, see parameter
5149. description above.</dd>
  5150. <dd><code>stdDeviationsIntrinsics</code> - Output vector of standard deviations estimated for intrinsic
  5151. parameters. Order of deviations values:
  5152. \((f_x, f_y, c_x, c_y, k_1, k_2, p_1, p_2, k_3, k_4, k_5, k_6 , s_1, s_2, s_3,
5153. s_4, \tau_x, \tau_y)\) If one of the parameters is not estimated, its deviation is equal to zero.</dd>
  5154. <dd><code>stdDeviationsExtrinsics</code> - Output vector of standard deviations estimated for extrinsic
  5155. parameters. Order of deviations values: \((R_0, T_0, \dotsc , R_{M - 1}, T_{M - 1})\) where M is
  5156. the number of pattern views. \(R_i, T_i\) are concatenated 1x3 vectors.</dd>
  5157. <dd><code>perViewErrors</code> - Output vector of the RMS re-projection error estimated for each pattern view.</dd>
  5158. <dd><code>flags</code> - Different flags that may be zero or a combination of the following values:
  5159. <ul>
  5160. <li>
  5161. REF: CALIB_USE_INTRINSIC_GUESS cameraMatrix contains valid initial values of
  5162. fx, fy, cx, cy that are optimized further. Otherwise, (cx, cy) is initially set to the image
  5163. center ( imageSize is used), and focal distances are computed in a least-squares fashion.
5164. Note that if the intrinsic parameters are known, there is no need to use this function just to
  5165. estimate extrinsic parameters. Use REF: solvePnP instead.
  5166. </li>
  5167. <li>
  5168. REF: CALIB_FIX_PRINCIPAL_POINT The principal point is not changed during the global
  5169. optimization. It stays at the center or at a different location specified when
  5170. REF: CALIB_USE_INTRINSIC_GUESS is set too.
  5171. </li>
  5172. <li>
  5173. REF: CALIB_FIX_ASPECT_RATIO The functions consider only fy as a free parameter. The
  5174. ratio fx/fy stays the same as in the input cameraMatrix . When
  5175. REF: CALIB_USE_INTRINSIC_GUESS is not set, the actual input values of fx and fy are
  5176. ignored, only their ratio is computed and used further.
  5177. </li>
  5178. <li>
  5179. REF: CALIB_ZERO_TANGENT_DIST Tangential distortion coefficients \((p_1, p_2)\) are set
  5180. to zeros and stay zero.
  5181. </li>
  5182. <li>
  5183. REF: CALIB_FIX_FOCAL_LENGTH The focal length is not changed during the global optimization if
  5184. REF: CALIB_USE_INTRINSIC_GUESS is set.
  5185. </li>
  5186. <li>
  5187. REF: CALIB_FIX_K1,..., REF: CALIB_FIX_K6 The corresponding radial distortion
  5188. coefficient is not changed during the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is
  5189. set, the coefficient from the supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  5190. </li>
  5191. <li>
  5192. REF: CALIB_RATIONAL_MODEL Coefficients k4, k5, and k6 are enabled. To provide the
  5193. backward compatibility, this extra flag should be explicitly specified to make the
  5194. calibration function use the rational model and return 8 coefficients or more.
  5195. </li>
  5196. <li>
  5197. REF: CALIB_THIN_PRISM_MODEL Coefficients s1, s2, s3 and s4 are enabled. To provide the
  5198. backward compatibility, this extra flag should be explicitly specified to make the
  5199. calibration function use the thin prism model and return 12 coefficients or more.
  5200. </li>
  5201. <li>
  5202. REF: CALIB_FIX_S1_S2_S3_S4 The thin prism distortion coefficients are not changed during
  5203. the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
  5204. supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  5205. </li>
  5206. <li>
  5207. REF: CALIB_TILTED_MODEL Coefficients tauX and tauY are enabled. To provide the
  5208. backward compatibility, this extra flag should be explicitly specified to make the
  5209. calibration function use the tilted sensor model and return 14 coefficients.
  5210. </li>
  5211. <li>
  5212. REF: CALIB_FIX_TAUX_TAUY The coefficients of the tilted sensor model are not changed during
  5213. the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
  5214. supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  5215. </li>
  5216. </ul></dd>
  5217. <dt><span class="returnLabel">Returns:</span></dt>
  5218. <dd>the overall RMS re-projection error.
  5219. The function estimates the intrinsic camera parameters and extrinsic parameters for each of the
  5220. views. The algorithm is based on CITE: Zhang2000 and CITE: BouguetMCT . The coordinates of 3D object
  5221. points and their corresponding 2D projections in each view must be specified. That may be achieved
  5222. by using an object with known geometry and easily detectable feature points. Such an object is
  5223. called a calibration rig or calibration pattern, and OpenCV has built-in support for a chessboard as
  5224. a calibration rig (see REF: findChessboardCorners). Currently, initialization of intrinsic
  5225. parameters (when REF: CALIB_USE_INTRINSIC_GUESS is not set) is only implemented for planar calibration
  5226. patterns (where Z-coordinates of the object points must be all zeros). 3D calibration rigs can also
  5227. be used as long as initial cameraMatrix is provided.
  5228. The algorithm performs the following steps:
  5229. <ul>
  5230. <li>
  5231. Compute the initial intrinsic parameters (the option only available for planar calibration
  5232. patterns) or read them from the input parameters. The distortion coefficients are all set to
  5233. zeros initially unless some of CALIB_FIX_K? are specified.
  5234. </li>
  5235. </ul>
  5236. <ul>
  5237. <li>
  5238. Estimate the initial camera pose as if the intrinsic parameters have been already known. This is
  5239. done using REF: solvePnP .
  5240. </li>
  5241. </ul>
  5242. <ul>
  5243. <li>
  5244. Run the global Levenberg-Marquardt optimization algorithm to minimize the reprojection error,
  5245. that is, the total sum of squared distances between the observed feature points imagePoints and
  5246. the projected (using the current estimates for camera parameters and the poses) object points
  5247. objectPoints. See REF: projectPoints for details.
  5248. </li>
  5249. </ul>
  5250. <b>Note:</b>
  5251. If you use a non-square (i.e. non-N-by-N) grid and REF: findChessboardCorners for calibration,
  5252. and REF: calibrateCamera returns bad values (zero distortion coefficients, \(c_x\) and
  5253. \(c_y\) very far from the image center, and/or large differences between \(f_x\) and
  5254. \(f_y\) (ratios of 10:1 or more)), then you are probably using patternSize=cvSize(rows,cols)
  5255. instead of using patternSize=cvSize(cols,rows) in REF: findChessboardCorners.
  5256. SEE:
  5257. calibrateCameraRO, findChessboardCorners, solvePnP, initCameraMatrix2D, stereoCalibrate,
  5258. undistort</dd>
  5259. </dl>
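<div class="block"><b>Example (illustrative):</b> the same call with a combination of the CALIB_* constants defined in this class;
this sketch is not part of the upstream OpenCV reference and reuses the variables from the sketch shown for the overload above.
The particular flag combination is an arbitrary choice for illustration only.</div>
<pre>
// Keep the fx/fy ratio fixed and force zero tangential distortion (illustrative choice).
int flags = Calib3d.CALIB_FIX_ASPECT_RATIO | Calib3d.CALIB_ZERO_TANGENT_DIST;

double rms = Calib3d.calibrateCameraExtended(objectPoints, imagePoints, imageSize,
        cameraMatrix, distCoeffs, rvecs, tvecs,
        stdDevIntrinsics, stdDevExtrinsics, perViewErrors, flags);
</pre>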
  5260. </li>
  5261. </ul>
  5262. <a name="calibrateCameraExtended-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.TermCriteria-">
  5263. <!-- -->
  5264. </a>
  5265. <ul class="blockList">
  5266. <li class="blockList">
  5267. <h4>calibrateCameraExtended</h4>
  5268. <pre>public static&nbsp;double&nbsp;calibrateCameraExtended(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  5269. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  5270. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  5271. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  5272. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  5273. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  5274. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  5275. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsIntrinsics,
  5276. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsExtrinsics,
  5277. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
  5278. int&nbsp;flags,
  5279. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</pre>
  5280. <div class="block">Finds the camera intrinsic and extrinsic parameters from several views of a calibration
  5281. pattern.</div>
  5282. <dl>
  5283. <dt><span class="paramLabel">Parameters:</span></dt>
  5284. <dd><code>objectPoints</code> - In the new interface it is a vector of vectors of calibration pattern points in
  5285. the calibration pattern coordinate space (e.g. std::vector&lt;std::vector&lt;cv::Vec3f&gt;&gt;). The outer
  5286. vector contains as many elements as the number of pattern views. If the same calibration pattern
5287. is shown in each view and it is fully visible, all the vectors will be the same. However, it is also
5288. possible to use partially occluded patterns or even different patterns in different views; in that case,
5289. the vectors will differ. Although the points are 3D, they all lie in the calibration pattern's
  5290. XY coordinate plane (thus 0 in the Z-coordinate), if the used calibration pattern is a planar rig.
  5291. In the old interface all the vectors of object points from different views are concatenated
  5292. together.</dd>
  5293. <dd><code>imagePoints</code> - In the new interface it is a vector of vectors of the projections of calibration
  5294. pattern points (e.g. std::vector&lt;std::vector&lt;cv::Vec2f&gt;&gt;). imagePoints.size() and
  5295. objectPoints.size(), and imagePoints[i].size() and objectPoints[i].size() for each i, must be equal,
5296. respectively. In the old interface all the vectors of image points from different views are
5297. concatenated together.</dd>
  5298. <dd><code>imageSize</code> - Size of the image used only to initialize the camera intrinsic matrix.</dd>
  5299. <dd><code>cameraMatrix</code> - Input/output 3x3 floating-point camera intrinsic matrix
  5300. \(\cameramatrix{A}\) . If REF: CALIB_USE_INTRINSIC_GUESS
  5301. and/or REF: CALIB_FIX_ASPECT_RATIO, REF: CALIB_FIX_PRINCIPAL_POINT or REF: CALIB_FIX_FOCAL_LENGTH
  5302. are specified, some or all of fx, fy, cx, cy must be initialized before calling the function.</dd>
  5303. <dd><code>distCoeffs</code> - Input/output vector of distortion coefficients
  5304. \(\distcoeffs\).</dd>
  5305. <dd><code>rvecs</code> - Output vector of rotation vectors (REF: Rodrigues ) estimated for each pattern view
  5306. (e.g. std::vector&lt;cv::Mat&gt;&gt;). That is, each i-th rotation vector together with the corresponding
  5307. i-th translation vector (see the next output parameter description) brings the calibration pattern
  5308. from the object coordinate space (in which object points are specified) to the camera coordinate
  5309. space. In more technical terms, the tuple of the i-th rotation and translation vector performs
  5310. a change of basis from object coordinate space to camera coordinate space. Due to its duality, this
  5311. tuple is equivalent to the position of the calibration pattern with respect to the camera coordinate
  5312. space.</dd>
  5313. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view, see parameter
5314. description above.</dd>
  5315. <dd><code>stdDeviationsIntrinsics</code> - Output vector of standard deviations estimated for intrinsic
  5316. parameters. Order of deviations values:
  5317. \((f_x, f_y, c_x, c_y, k_1, k_2, p_1, p_2, k_3, k_4, k_5, k_6 , s_1, s_2, s_3,
5318. s_4, \tau_x, \tau_y)\) If one of the parameters is not estimated, its deviation is equal to zero.</dd>
  5319. <dd><code>stdDeviationsExtrinsics</code> - Output vector of standard deviations estimated for extrinsic
  5320. parameters. Order of deviations values: \((R_0, T_0, \dotsc , R_{M - 1}, T_{M - 1})\) where M is
  5321. the number of pattern views. \(R_i, T_i\) are concatenated 1x3 vectors.</dd>
  5322. <dd><code>perViewErrors</code> - Output vector of the RMS re-projection error estimated for each pattern view.</dd>
  5323. <dd><code>flags</code> - Different flags that may be zero or a combination of the following values:
  5324. <ul>
  5325. <li>
  5326. REF: CALIB_USE_INTRINSIC_GUESS cameraMatrix contains valid initial values of
  5327. fx, fy, cx, cy that are optimized further. Otherwise, (cx, cy) is initially set to the image
  5328. center ( imageSize is used), and focal distances are computed in a least-squares fashion.
5329. Note that if the intrinsic parameters are known, there is no need to use this function just to
  5330. estimate extrinsic parameters. Use REF: solvePnP instead.
  5331. </li>
  5332. <li>
  5333. REF: CALIB_FIX_PRINCIPAL_POINT The principal point is not changed during the global
  5334. optimization. It stays at the center or at a different location specified when
  5335. REF: CALIB_USE_INTRINSIC_GUESS is set too.
  5336. </li>
  5337. <li>
  5338. REF: CALIB_FIX_ASPECT_RATIO The functions consider only fy as a free parameter. The
  5339. ratio fx/fy stays the same as in the input cameraMatrix . When
  5340. REF: CALIB_USE_INTRINSIC_GUESS is not set, the actual input values of fx and fy are
  5341. ignored, only their ratio is computed and used further.
  5342. </li>
  5343. <li>
  5344. REF: CALIB_ZERO_TANGENT_DIST Tangential distortion coefficients \((p_1, p_2)\) are set
  5345. to zeros and stay zero.
  5346. </li>
  5347. <li>
  5348. REF: CALIB_FIX_FOCAL_LENGTH The focal length is not changed during the global optimization if
  5349. REF: CALIB_USE_INTRINSIC_GUESS is set.
  5350. </li>
  5351. <li>
  5352. REF: CALIB_FIX_K1,..., REF: CALIB_FIX_K6 The corresponding radial distortion
  5353. coefficient is not changed during the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is
  5354. set, the coefficient from the supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  5355. </li>
  5356. <li>
  5357. REF: CALIB_RATIONAL_MODEL Coefficients k4, k5, and k6 are enabled. To provide the
  5358. backward compatibility, this extra flag should be explicitly specified to make the
  5359. calibration function use the rational model and return 8 coefficients or more.
  5360. </li>
  5361. <li>
  5362. REF: CALIB_THIN_PRISM_MODEL Coefficients s1, s2, s3 and s4 are enabled. To provide the
  5363. backward compatibility, this extra flag should be explicitly specified to make the
  5364. calibration function use the thin prism model and return 12 coefficients or more.
  5365. </li>
  5366. <li>
  5367. REF: CALIB_FIX_S1_S2_S3_S4 The thin prism distortion coefficients are not changed during
  5368. the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
  5369. supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  5370. </li>
  5371. <li>
  5372. REF: CALIB_TILTED_MODEL Coefficients tauX and tauY are enabled. To provide the
  5373. backward compatibility, this extra flag should be explicitly specified to make the
  5374. calibration function use the tilted sensor model and return 14 coefficients.
  5375. </li>
  5376. <li>
  5377. REF: CALIB_FIX_TAUX_TAUY The coefficients of the tilted sensor model are not changed during
  5378. the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
  5379. supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  5380. </li>
  5381. </ul></dd>
  5382. <dd><code>criteria</code> - Termination criteria for the iterative optimization algorithm.</dd>
  5383. <dt><span class="returnLabel">Returns:</span></dt>
  5384. <dd>the overall RMS re-projection error.
  5385. The function estimates the intrinsic camera parameters and extrinsic parameters for each of the
  5386. views. The algorithm is based on CITE: Zhang2000 and CITE: BouguetMCT . The coordinates of 3D object
  5387. points and their corresponding 2D projections in each view must be specified. That may be achieved
  5388. by using an object with known geometry and easily detectable feature points. Such an object is
  5389. called a calibration rig or calibration pattern, and OpenCV has built-in support for a chessboard as
  5390. a calibration rig (see REF: findChessboardCorners). Currently, initialization of intrinsic
  5391. parameters (when REF: CALIB_USE_INTRINSIC_GUESS is not set) is only implemented for planar calibration
  5392. patterns (where Z-coordinates of the object points must be all zeros). 3D calibration rigs can also
  5393. be used as long as initial cameraMatrix is provided.
  5394. The algorithm performs the following steps:
  5395. <ul>
  5396. <li>
  5397. Compute the initial intrinsic parameters (the option only available for planar calibration
  5398. patterns) or read them from the input parameters. The distortion coefficients are all set to
  5399. zeros initially unless some of CALIB_FIX_K? are specified.
  5400. </li>
  5401. </ul>
  5402. <ul>
  5403. <li>
  5404. Estimate the initial camera pose as if the intrinsic parameters have been already known. This is
  5405. done using REF: solvePnP .
  5406. </li>
  5407. </ul>
  5408. <ul>
  5409. <li>
  5410. Run the global Levenberg-Marquardt optimization algorithm to minimize the reprojection error,
  5411. that is, the total sum of squared distances between the observed feature points imagePoints and
  5412. the projected (using the current estimates for camera parameters and the poses) object points
  5413. objectPoints. See REF: projectPoints for details.
  5414. </li>
  5415. </ul>
  5416. <b>Note:</b>
  5417. If you use a non-square (i.e. non-N-by-N) grid and REF: findChessboardCorners for calibration,
  5418. and REF: calibrateCamera returns bad values (zero distortion coefficients, \(c_x\) and
  5419. \(c_y\) very far from the image center, and/or large differences between \(f_x\) and
  5420. \(f_y\) (ratios of 10:1 or more)), then you are probably using patternSize=cvSize(rows,cols)
  5421. instead of using patternSize=cvSize(cols,rows) in REF: findChessboardCorners.
  5422. SEE:
  5423. calibrateCameraRO, findChessboardCorners, solvePnP, initCameraMatrix2D, stereoCalibrate,
  5424. undistort</dd>
  5425. </dl>
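<div class="block"><b>Example (illustrative):</b> supplying explicit termination criteria for the Levenberg-Marquardt optimization;
this sketch is not part of the upstream OpenCV reference, reuses the variables from the first sketch above, and additionally
requires <code>org.opencv.core.TermCriteria</code>. The iteration count, epsilon, and flag choice are illustrative values only.</div>
<pre>
// Stop after at most 100 iterations or when the parameter change drops below 1e-6.
TermCriteria criteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 100, 1e-6);
int flags = Calib3d.CALIB_RATIONAL_MODEL;   // illustrative: also estimate k4, k5, k6

double rms = Calib3d.calibrateCameraExtended(objectPoints, imagePoints, imageSize,
        cameraMatrix, distCoeffs, rvecs, tvecs,
        stdDevIntrinsics, stdDevExtrinsics, perViewErrors, flags, criteria);
</pre>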
  5426. </li>
  5427. </ul>
  5428. <a name="calibrateCameraRO-java.util.List-java.util.List-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-">
  5429. <!-- -->
  5430. </a>
  5431. <ul class="blockList">
  5432. <li class="blockList">
  5433. <h4>calibrateCameraRO</h4>
  5434. <pre>public static&nbsp;double&nbsp;calibrateCameraRO(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  5435. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  5436. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  5437. int&nbsp;iFixedPoint,
  5438. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  5439. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  5440. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  5441. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  5442. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newObjPoints)</pre>
  5443. </li>
  5444. </ul>
  5445. <a name="calibrateCameraRO-java.util.List-java.util.List-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-int-">
  5446. <!-- -->
  5447. </a>
  5448. <ul class="blockList">
  5449. <li class="blockList">
  5450. <h4>calibrateCameraRO</h4>
  5451. <pre>public static&nbsp;double&nbsp;calibrateCameraRO(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  5452. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  5453. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  5454. int&nbsp;iFixedPoint,
  5455. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  5456. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  5457. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  5458. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  5459. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newObjPoints,
  5460. int&nbsp;flags)</pre>
  5461. </li>
  5462. </ul>
  5463. <a name="calibrateCameraRO-java.util.List-java.util.List-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-int-org.opencv.core.TermCriteria-">
  5464. <!-- -->
  5465. </a>
  5466. <ul class="blockList">
  5467. <li class="blockList">
  5468. <h4>calibrateCameraRO</h4>
  5469. <pre>public static&nbsp;double&nbsp;calibrateCameraRO(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  5470. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  5471. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  5472. int&nbsp;iFixedPoint,
  5473. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  5474. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  5475. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  5476. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  5477. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newObjPoints,
  5478. int&nbsp;flags,
  5479. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</pre>
  5480. </li>
  5481. </ul>
  5482. <a name="calibrateCameraROExtended-java.util.List-java.util.List-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  5483. <!-- -->
  5484. </a>
  5485. <ul class="blockList">
  5486. <li class="blockList">
  5487. <h4>calibrateCameraROExtended</h4>
  5488. <pre>public static&nbsp;double&nbsp;calibrateCameraROExtended(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  5489. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  5490. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  5491. int&nbsp;iFixedPoint,
  5492. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  5493. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  5494. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  5495. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  5496. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newObjPoints,
  5497. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsIntrinsics,
  5498. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsExtrinsics,
  5499. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsObjPoints,
  5500. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors)</pre>
  5501. <div class="block">Finds the camera intrinsic and extrinsic parameters from several views of a calibration pattern.
  5502. This function is an extension of #calibrateCamera with the method of releasing object which was
  5503. proposed in CITE: strobl2011iccv. In many common cases with inaccurate, unmeasured, roughly planar
  5504. targets (calibration plates), this method can dramatically improve the precision of the estimated
  5505. camera parameters. Both the object-releasing method and standard method are supported by this
  5506. function. Use the parameter <b>iFixedPoint</b> for method selection. In the internal implementation,
  5507. #calibrateCamera is a wrapper for this function.</div>
  5508. <dl>
  5509. <dt><span class="paramLabel">Parameters:</span></dt>
  5510. <dd><code>objectPoints</code> - Vector of vectors of calibration pattern points in the calibration pattern
5511. coordinate space. See #calibrateCamera for details. If the object-releasing method is to be used,
5512. the identical calibration board must be used in each view, it must be fully visible, and all
5513. objectPoints[i] must be the same, with all points lying roughly in a plane. <b>The calibration
  5514. target has to be rigid, or at least static if the camera (rather than the calibration target) is
  5515. shifted for grabbing images.</b></dd>
  5516. <dd><code>imagePoints</code> - Vector of vectors of the projections of calibration pattern points. See
  5517. #calibrateCamera for details.</dd>
  5518. <dd><code>imageSize</code> - Size of the image used only to initialize the intrinsic camera matrix.</dd>
<dd><code>iFixedPoint</code> - The index of the 3D object point in objectPoints[0] to be fixed. It also acts as
a switch for calibration method selection. If the object-releasing method is to be used, pass a value
in the range [1, objectPoints[0].size()-2]; a value outside this range selects the standard
calibration method. Fixing the top-right corner point of the calibration board grid is usually
recommended when the object-releasing method is used. According to
CITE: strobl2011iccv, two other points are also fixed. In this implementation, objectPoints[0].front
and objectPoints[0].back.z are used. With the object-releasing method, accurate rvecs, tvecs and
newObjPoints are only possible if the coordinates of these three fixed points are accurate enough.</dd>
  5527. <dd><code>cameraMatrix</code> - Output 3x3 floating-point camera matrix. See #calibrateCamera for details.</dd>
  5528. <dd><code>distCoeffs</code> - Output vector of distortion coefficients. See #calibrateCamera for details.</dd>
  5529. <dd><code>rvecs</code> - Output vector of rotation vectors estimated for each pattern view. See #calibrateCamera
  5530. for details.</dd>
  5531. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view.</dd>
  5532. <dd><code>newObjPoints</code> - The updated output vector of calibration pattern points. The coordinates might
  5533. be scaled based on three fixed points. The returned coordinates are accurate only if the above
  5534. mentioned three fixed points are accurate. If not needed, noArray() can be passed in. This parameter
  5535. is ignored with standard calibration method.</dd>
  5536. <dd><code>stdDeviationsIntrinsics</code> - Output vector of standard deviations estimated for intrinsic parameters.
  5537. See #calibrateCamera for details.</dd>
  5538. <dd><code>stdDeviationsExtrinsics</code> - Output vector of standard deviations estimated for extrinsic parameters.
  5539. See #calibrateCamera for details.</dd>
  5540. <dd><code>stdDeviationsObjPoints</code> - Output vector of standard deviations estimated for refined coordinates
  5541. of calibration pattern points. It has the same size and order as objectPoints[0] vector. This
  5542. parameter is ignored with standard calibration method.</dd>
<dd><code>perViewErrors</code> - Output vector of the RMS re-projection error estimated for each pattern view. See
#calibrateCamera for details. If the object-releasing method is used, the calibration time may
be much longer. CALIB_USE_QR or CALIB_USE_LU can be used for faster calibration, at the cost of
potentially less precise and less stable results in some rare cases.</dd>
  5547. <dt><span class="returnLabel">Returns:</span></dt>
  5548. <dd>the overall RMS re-projection error.
  5549. The function estimates the intrinsic camera parameters and extrinsic parameters for each of the
  5550. views. The algorithm is based on CITE: Zhang2000, CITE: BouguetMCT and CITE: strobl2011iccv. See
  5551. #calibrateCamera for other detailed explanations.
  5552. SEE:
  5553. calibrateCamera, findChessboardCorners, solvePnP, initCameraMatrix2D, stereoCalibrate, undistort</dd>
  5554. </dl>
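<div class="block">A minimal, hedged usage sketch of this overload (not part of the official OpenCV samples). It assumes the per-view point lists have already been collected, for example with findChessboardCorners plus cornerSubPix, and that the board has <code>boardCols</code> columns so that the top-right corner index is <code>boardCols - 1</code>:</div>
<pre>
import java.util.ArrayList;
import java.util.List;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;
import org.opencv.core.Size;

public class ReleaseObjectCalibrationSketch {
    // objectPoints: one MatOfPoint3f per view (identical for all views);
    // imagePoints: one MatOfPoint2f per view; both collected beforehand.
    public static double calibrate(List&lt;Mat&gt; objectPoints, List&lt;Mat&gt; imagePoints,
                                   Size imageSize, int boardCols) {
        Mat cameraMatrix = new Mat();
        Mat distCoeffs = new Mat();
        List&lt;Mat&gt; rvecs = new ArrayList&lt;&gt;();
        List&lt;Mat&gt; tvecs = new ArrayList&lt;&gt;();
        Mat newObjPoints = new Mat();
        Mat stdDevIntrinsics = new Mat();
        Mat stdDevExtrinsics = new Mat();
        Mat stdDevObjPoints = new Mat();
        Mat perViewErrors = new Mat();

        // Fix the top-right corner of the board grid, which lies in the required
        // range [1, objectPoints[0].size()-2] and therefore selects the
        // object-releasing method.
        int iFixedPoint = boardCols - 1;

        // Returns the overall RMS re-projection error.
        return Calib3d.calibrateCameraROExtended(objectPoints, imagePoints, imageSize,
                iFixedPoint, cameraMatrix, distCoeffs, rvecs, tvecs, newObjPoints,
                stdDevIntrinsics, stdDevExtrinsics, stdDevObjPoints, perViewErrors);
    }
}
</pre>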
  5555. </li>
  5556. </ul>
  5557. <a name="calibrateCameraROExtended-java.util.List-java.util.List-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">
  5558. <!-- -->
  5559. </a>
  5560. <ul class="blockList">
  5561. <li class="blockList">
  5562. <h4>calibrateCameraROExtended</h4>
  5563. <pre>public static&nbsp;double&nbsp;calibrateCameraROExtended(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  5564. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  5565. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  5566. int&nbsp;iFixedPoint,
  5567. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  5568. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  5569. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  5570. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  5571. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newObjPoints,
  5572. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsIntrinsics,
  5573. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsExtrinsics,
  5574. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsObjPoints,
  5575. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
  5576. int&nbsp;flags)</pre>
<div class="block">Finds the camera intrinsic and extrinsic parameters from several views of a calibration pattern.
This function is an extension of #calibrateCamera with the object-releasing method proposed in
CITE: strobl2011iccv. In many common cases with inaccurate, unmeasured, roughly planar
targets (calibration plates), this method can dramatically improve the precision of the estimated
camera parameters. Both the object-releasing method and the standard method are supported by this
function. Use the parameter <b>iFixedPoint</b> to select the method. In the internal implementation,
#calibrateCamera is a wrapper for this function.</div>
  5584. <dl>
  5585. <dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>objectPoints</code> - Vector of vectors of calibration pattern points in the calibration pattern
coordinate space. See #calibrateCamera for details. If the object-releasing method is to be used,
the identical calibration board must be used in each view, it must be fully visible, all
objectPoints[i] must be the same, and all points should lie roughly in a plane. <b>The calibration
target has to be rigid, or at least static if the camera (rather than the calibration target) is
moved to grab the images.</b></dd>
  5592. <dd><code>imagePoints</code> - Vector of vectors of the projections of calibration pattern points. See
  5593. #calibrateCamera for details.</dd>
  5594. <dd><code>imageSize</code> - Size of the image used only to initialize the intrinsic camera matrix.</dd>
<dd><code>iFixedPoint</code> - The index of the 3D object point in objectPoints[0] to be fixed. It also acts as
a switch for calibration method selection. If the object-releasing method is to be used, pass a value
in the range [1, objectPoints[0].size()-2]; a value outside this range selects the standard
calibration method. Fixing the top-right corner point of the calibration board grid is usually
recommended when the object-releasing method is used. According to
CITE: strobl2011iccv, two other points are also fixed. In this implementation, objectPoints[0].front
and objectPoints[0].back.z are used. With the object-releasing method, accurate rvecs, tvecs and
newObjPoints are only possible if the coordinates of these three fixed points are accurate enough.</dd>
  5603. <dd><code>cameraMatrix</code> - Output 3x3 floating-point camera matrix. See #calibrateCamera for details.</dd>
  5604. <dd><code>distCoeffs</code> - Output vector of distortion coefficients. See #calibrateCamera for details.</dd>
  5605. <dd><code>rvecs</code> - Output vector of rotation vectors estimated for each pattern view. See #calibrateCamera
  5606. for details.</dd>
  5607. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view.</dd>
  5608. <dd><code>newObjPoints</code> - The updated output vector of calibration pattern points. The coordinates might
  5609. be scaled based on three fixed points. The returned coordinates are accurate only if the above
  5610. mentioned three fixed points are accurate. If not needed, noArray() can be passed in. This parameter
  5611. is ignored with standard calibration method.</dd>
  5612. <dd><code>stdDeviationsIntrinsics</code> - Output vector of standard deviations estimated for intrinsic parameters.
  5613. See #calibrateCamera for details.</dd>
  5614. <dd><code>stdDeviationsExtrinsics</code> - Output vector of standard deviations estimated for extrinsic parameters.
  5615. See #calibrateCamera for details.</dd>
  5616. <dd><code>stdDeviationsObjPoints</code> - Output vector of standard deviations estimated for refined coordinates
  5617. of calibration pattern points. It has the same size and order as objectPoints[0] vector. This
  5618. parameter is ignored with standard calibration method.</dd>
  5619. <dd><code>perViewErrors</code> - Output vector of the RMS re-projection error estimated for each pattern view.</dd>
<dd><code>flags</code> - Different flags that may be zero or a combination of some predefined values. See
#calibrateCamera for details. If the object-releasing method is used, the calibration time may
be much longer. CALIB_USE_QR or CALIB_USE_LU can be used for faster calibration, at the cost of
potentially less precise and less stable results in some rare cases.</dd>
  5624. <dt><span class="returnLabel">Returns:</span></dt>
  5625. <dd>the overall RMS re-projection error.
  5626. The function estimates the intrinsic camera parameters and extrinsic parameters for each of the
  5627. views. The algorithm is based on CITE: Zhang2000, CITE: BouguetMCT and CITE: strobl2011iccv. See
  5628. #calibrateCamera for other detailed explanations.
  5629. SEE:
  5630. calibrateCamera, findChessboardCorners, solvePnP, initCameraMatrix2D, stereoCalibrate, undistort</dd>
  5631. </dl>
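<div class="block">The <code>flags</code> value is a bitwise OR of the CALIB_* constants; a hedged illustration (the particular combination shown is only an example):</div>
<pre>
// Illustration only: fix the fx/fy ratio and request the faster LU-based solver
// mentioned above.
int flags = Calib3d.CALIB_FIX_ASPECT_RATIO | Calib3d.CALIB_USE_LU;
</pre>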
  5632. </li>
  5633. </ul>
  5634. <a name="calibrateCameraROExtended-java.util.List-java.util.List-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.TermCriteria-">
  5635. <!-- -->
  5636. </a>
  5637. <ul class="blockList">
  5638. <li class="blockList">
  5639. <h4>calibrateCameraROExtended</h4>
  5640. <pre>public static&nbsp;double&nbsp;calibrateCameraROExtended(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  5641. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  5642. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  5643. int&nbsp;iFixedPoint,
  5644. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  5645. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  5646. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  5647. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  5648. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newObjPoints,
  5649. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsIntrinsics,
  5650. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsExtrinsics,
  5651. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;stdDeviationsObjPoints,
  5652. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
  5653. int&nbsp;flags,
  5654. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</pre>
<div class="block">Finds the camera intrinsic and extrinsic parameters from several views of a calibration pattern.
This function is an extension of #calibrateCamera with the object-releasing method proposed in
CITE: strobl2011iccv. In many common cases with inaccurate, unmeasured, roughly planar
targets (calibration plates), this method can dramatically improve the precision of the estimated
camera parameters. Both the object-releasing method and the standard method are supported by this
function. Use the parameter <b>iFixedPoint</b> to select the method. In the internal implementation,
#calibrateCamera is a wrapper for this function.</div>
  5662. <dl>
  5663. <dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>objectPoints</code> - Vector of vectors of calibration pattern points in the calibration pattern
coordinate space. See #calibrateCamera for details. If the object-releasing method is to be used,
the identical calibration board must be used in each view, it must be fully visible, all
objectPoints[i] must be the same, and all points should lie roughly in a plane. <b>The calibration
target has to be rigid, or at least static if the camera (rather than the calibration target) is
moved to grab the images.</b></dd>
  5670. <dd><code>imagePoints</code> - Vector of vectors of the projections of calibration pattern points. See
  5671. #calibrateCamera for details.</dd>
  5672. <dd><code>imageSize</code> - Size of the image used only to initialize the intrinsic camera matrix.</dd>
<dd><code>iFixedPoint</code> - The index of the 3D object point in objectPoints[0] to be fixed. It also acts as
a switch for calibration method selection. If the object-releasing method is to be used, pass a value
in the range [1, objectPoints[0].size()-2]; a value outside this range selects the standard
calibration method. Fixing the top-right corner point of the calibration board grid is usually
recommended when the object-releasing method is used. According to
CITE: strobl2011iccv, two other points are also fixed. In this implementation, objectPoints[0].front
and objectPoints[0].back.z are used. With the object-releasing method, accurate rvecs, tvecs and
newObjPoints are only possible if the coordinates of these three fixed points are accurate enough.</dd>
  5681. <dd><code>cameraMatrix</code> - Output 3x3 floating-point camera matrix. See #calibrateCamera for details.</dd>
  5682. <dd><code>distCoeffs</code> - Output vector of distortion coefficients. See #calibrateCamera for details.</dd>
  5683. <dd><code>rvecs</code> - Output vector of rotation vectors estimated for each pattern view. See #calibrateCamera
  5684. for details.</dd>
  5685. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view.</dd>
  5686. <dd><code>newObjPoints</code> - The updated output vector of calibration pattern points. The coordinates might
  5687. be scaled based on three fixed points. The returned coordinates are accurate only if the above
  5688. mentioned three fixed points are accurate. If not needed, noArray() can be passed in. This parameter
  5689. is ignored with standard calibration method.</dd>
  5690. <dd><code>stdDeviationsIntrinsics</code> - Output vector of standard deviations estimated for intrinsic parameters.
  5691. See #calibrateCamera for details.</dd>
  5692. <dd><code>stdDeviationsExtrinsics</code> - Output vector of standard deviations estimated for extrinsic parameters.
  5693. See #calibrateCamera for details.</dd>
  5694. <dd><code>stdDeviationsObjPoints</code> - Output vector of standard deviations estimated for refined coordinates
  5695. of calibration pattern points. It has the same size and order as objectPoints[0] vector. This
  5696. parameter is ignored with standard calibration method.</dd>
  5697. <dd><code>perViewErrors</code> - Output vector of the RMS re-projection error estimated for each pattern view.</dd>
<dd><code>flags</code> - Different flags that may be zero or a combination of some predefined values. See
#calibrateCamera for details. If the object-releasing method is used, the calibration time may
be much longer. CALIB_USE_QR or CALIB_USE_LU can be used for faster calibration, at the cost of
potentially less precise and less stable results in some rare cases.</dd>
  5702. <dd><code>criteria</code> - Termination criteria for the iterative optimization algorithm.</dd>
  5703. <dt><span class="returnLabel">Returns:</span></dt>
  5704. <dd>the overall RMS re-projection error.
  5705. The function estimates the intrinsic camera parameters and extrinsic parameters for each of the
  5706. views. The algorithm is based on CITE: Zhang2000, CITE: BouguetMCT and CITE: strobl2011iccv. See
  5707. #calibrateCamera for other detailed explanations.
  5708. SEE:
  5709. calibrateCamera, findChessboardCorners, solvePnP, initCameraMatrix2D, stereoCalibrate, undistort</dd>
  5710. </dl>
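<div class="block">A hedged sketch of how the termination criteria might be supplied in this overload, reusing the variable names from the sketch after the first overload above (the numeric values are illustrative only):</div>
<pre>
import org.opencv.core.TermCriteria;

// Stop after 100 iterations or when the parameter change drops below 1e-6.
TermCriteria criteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 100, 1e-6);

double rms = Calib3d.calibrateCameraROExtended(objectPoints, imagePoints, imageSize,
        iFixedPoint, cameraMatrix, distCoeffs, rvecs, tvecs, newObjPoints,
        stdDevIntrinsics, stdDevExtrinsics, stdDevObjPoints, perViewErrors,
        Calib3d.CALIB_USE_LU, criteria);
</pre>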
  5711. </li>
  5712. </ul>
  5713. <a name="calibrateHandEye-java.util.List-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-">
  5714. <!-- -->
  5715. </a>
  5716. <ul class="blockList">
  5717. <li class="blockList">
  5718. <h4>calibrateHandEye</h4>
  5719. <pre>public static&nbsp;void&nbsp;calibrateHandEye(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_gripper2base,
  5720. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_gripper2base,
  5721. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_target2cam,
  5722. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_target2cam,
  5723. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R_cam2gripper,
  5724. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t_cam2gripper)</pre>
  5725. <div class="block">Computes Hand-Eye calibration: \(_{}^{g}\textrm{T}_c\)</div>
  5726. <dl>
  5727. <dt><span class="paramLabel">Parameters:</span></dt>
  5728. <dd><code>R_gripper2base</code> - Rotation part extracted from the homogeneous matrix that transforms a point
  5729. expressed in the gripper frame to the robot base frame (\(_{}^{b}\textrm{T}_g\)).
This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the rotations, as <code>(3x3)</code> rotation matrices or <code>(3x1)</code> rotation vectors,
  5731. for all the transformations from gripper frame to robot base frame.</dd>
  5732. <dd><code>t_gripper2base</code> - Translation part extracted from the homogeneous matrix that transforms a point
  5733. expressed in the gripper frame to the robot base frame (\(_{}^{b}\textrm{T}_g\)).
  5734. This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the <code>(3x1)</code> translation vectors for all the transformations
  5735. from gripper frame to robot base frame.</dd>
  5736. <dd><code>R_target2cam</code> - Rotation part extracted from the homogeneous matrix that transforms a point
  5737. expressed in the target frame to the camera frame (\(_{}^{c}\textrm{T}_t\)).
This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the rotations, as <code>(3x3)</code> rotation matrices or <code>(3x1)</code> rotation vectors,
  5739. for all the transformations from calibration target frame to camera frame.</dd>
<dd><code>t_target2cam</code> - Translation part extracted from the homogeneous matrix that transforms a point
  5741. expressed in the target frame to the camera frame (\(_{}^{c}\textrm{T}_t\)).
  5742. This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the <code>(3x1)</code> translation vectors for all the transformations
  5743. from calibration target frame to camera frame.</dd>
  5744. <dd><code>R_cam2gripper</code> - Estimated <code>(3x3)</code> rotation part extracted from the homogeneous matrix that transforms a point
  5745. expressed in the camera frame to the gripper frame (\(_{}^{g}\textrm{T}_c\)).</dd>
  5746. <dd><code>t_cam2gripper</code> - Estimated <code>(3x1)</code> translation part extracted from the homogeneous matrix that transforms a point
  5747. expressed in the camera frame to the gripper frame (\(_{}^{g}\textrm{T}_c\)).
The function performs the Hand-Eye calibration using various methods. One approach consists of estimating the
rotation and then the translation (separable solutions); the following methods are implemented:
  5750. <ul>
  5751. <li>
  5752. R. Tsai, R. Lenz A New Technique for Fully Autonomous and Efficient 3D Robotics Hand/EyeCalibration \cite Tsai89
  5753. </li>
  5754. <li>
  5755. F. Park, B. Martin Robot Sensor Calibration: Solving AX = XB on the Euclidean Group \cite Park94
  5756. </li>
  5757. <li>
  5758. R. Horaud, F. Dornaika Hand-Eye Calibration \cite Horaud95
  5759. </li>
  5760. </ul>
Another approach consists of estimating the rotation and the translation simultaneously (simultaneous solutions),
with the following implemented methods:
  5763. <ul>
  5764. <li>
  5765. N. Andreff, R. Horaud, B. Espiau On-line Hand-Eye Calibration \cite Andreff99
  5766. </li>
  5767. <li>
  5768. K. Daniilidis Hand-Eye Calibration Using Dual Quaternions \cite Daniilidis98
  5769. </li>
  5770. </ul>
The following picture describes the Hand-Eye calibration problem, where the transformation between a camera ("eye")
mounted on a robot gripper ("hand") and the gripper has to be estimated. This configuration is called eye-in-hand.
The eye-to-hand configuration consists of a static camera observing a calibration pattern mounted on the robot
  5774. end-effector. The transformation from the camera to the robot base frame can then be estimated by inputting
  5775. the suitable transformations to the function, see below.
(Figure: pics/hand-eye_figure.png)
  5777. The calibration procedure is the following:
  5778. <ul>
  5779. <li>
  5780. a static calibration pattern is used to estimate the transformation between the target frame
  5781. and the camera frame
  5782. </li>
  5783. <li>
  5784. the robot gripper is moved in order to acquire several poses
  5785. </li>
  5786. <li>
  5787. for each pose, the homogeneous transformation between the gripper frame and the robot base frame is recorded using for
  5788. instance the robot kinematics
  5789. \(
  5790. \begin{bmatrix}
  5791. X_b\\
  5792. Y_b\\
  5793. Z_b\\
  5794. 1
  5795. \end{bmatrix}
  5796. =
  5797. \begin{bmatrix}
  5798. _{}^{b}\textrm{R}_g &amp; _{}^{b}\textrm{t}_g \\
  5799. 0_{1 \times 3} &amp; 1
  5800. \end{bmatrix}
  5801. \begin{bmatrix}
  5802. X_g\\
  5803. Y_g\\
  5804. Z_g\\
  5805. 1
  5806. \end{bmatrix}
  5807. \)
  5808. </li>
  5809. <li>
  5810. for each pose, the homogeneous transformation between the calibration target frame and the camera frame is recorded using
  5811. for instance a pose estimation method (PnP) from 2D-3D point correspondences
  5812. \(
  5813. \begin{bmatrix}
  5814. X_c\\
  5815. Y_c\\
  5816. Z_c\\
  5817. 1
  5818. \end{bmatrix}
  5819. =
  5820. \begin{bmatrix}
  5821. _{}^{c}\textrm{R}_t &amp; _{}^{c}\textrm{t}_t \\
  5822. 0_{1 \times 3} &amp; 1
  5823. \end{bmatrix}
  5824. \begin{bmatrix}
  5825. X_t\\
  5826. Y_t\\
  5827. Z_t\\
  5828. 1
  5829. \end{bmatrix}
  5830. \)
  5831. </li>
  5832. </ul>
  5833. The Hand-Eye calibration procedure returns the following homogeneous transformation
  5834. \(
  5835. \begin{bmatrix}
  5836. X_g\\
  5837. Y_g\\
  5838. Z_g\\
  5839. 1
  5840. \end{bmatrix}
  5841. =
  5842. \begin{bmatrix}
  5843. _{}^{g}\textrm{R}_c &amp; _{}^{g}\textrm{t}_c \\
  5844. 0_{1 \times 3} &amp; 1
  5845. \end{bmatrix}
  5846. \begin{bmatrix}
  5847. X_c\\
  5848. Y_c\\
  5849. Z_c\\
  5850. 1
  5851. \end{bmatrix}
  5852. \)
  5853. This problem is also known as solving the \(\mathbf{A}\mathbf{X}=\mathbf{X}\mathbf{B}\) equation:
<ul>
<li>
for an eye-in-hand configuration
\(
\begin{align*}
^{b}{\textrm{T}_g}^{(1)} \hspace{0.2em} ^{g}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(1)} &amp;=
\hspace{0.1em} ^{b}{\textrm{T}_g}^{(2)} \hspace{0.2em} ^{g}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(2)} \\
(^{b}{\textrm{T}_g}^{(2)})^{-1} \hspace{0.2em} ^{b}{\textrm{T}_g}^{(1)} \hspace{0.2em} ^{g}\textrm{T}_c &amp;=
\hspace{0.1em} ^{g}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(2)} (^{c}{\textrm{T}_t}^{(1)})^{-1} \\
\textrm{A}_i \textrm{X} &amp;= \textrm{X} \textrm{B}_i \\
\end{align*}
\)
</li>
<li>
for an eye-to-hand configuration
\(
\begin{align*}
^{g}{\textrm{T}_b}^{(1)} \hspace{0.2em} ^{b}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(1)} &amp;=
\hspace{0.1em} ^{g}{\textrm{T}_b}^{(2)} \hspace{0.2em} ^{b}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(2)} \\
(^{g}{\textrm{T}_b}^{(2)})^{-1} \hspace{0.2em} ^{g}{\textrm{T}_b}^{(1)} \hspace{0.2em} ^{b}\textrm{T}_c &amp;=
\hspace{0.1em} ^{b}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(2)} (^{c}{\textrm{T}_t}^{(1)})^{-1} \\
\textrm{A}_i \textrm{X} &amp;= \textrm{X} \textrm{B}_i \\
\end{align*}
\)
</li>
</ul>
<b>Note:</b>
Additional information can be found on this <a href="http://campar.in.tum.de/Chair/HandEyeCalibration">website</a>.
<b>Note:</b>
A minimum of 2 motions with non-parallel rotation axes is necessary to determine the hand-eye transformation.
So at least 3 different poses are required, but it is strongly recommended to use many more poses.</dd>
  5887. </dl>
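<div class="block">A minimal, hedged eye-in-hand sketch (assuming the per-pose rotations and translations have already been collected, e.g. the gripper-to-base motions from the robot kinematics and the target-to-camera motions from solvePnP):</div>
<pre>
import java.util.List;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;

public class HandEyeSketch {
    // Returns { R_cam2gripper (3x3), t_cam2gripper (3x1) }.
    public static Mat[] calibrate(List&lt;Mat&gt; R_gripper2base, List&lt;Mat&gt; t_gripper2base,
                                  List&lt;Mat&gt; R_target2cam, List&lt;Mat&gt; t_target2cam) {
        Mat R_cam2gripper = new Mat();  // estimated rotation, camera -&gt; gripper
        Mat t_cam2gripper = new Mat();  // estimated translation, camera -&gt; gripper
        Calib3d.calibrateHandEye(R_gripper2base, t_gripper2base,
                                 R_target2cam, t_target2cam,
                                 R_cam2gripper, t_cam2gripper);
        return new Mat[] { R_cam2gripper, t_cam2gripper };
    }
}
</pre>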
  5888. </li>
  5889. </ul>
  5890. <a name="calibrateHandEye-java.util.List-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-int-">
  5891. <!-- -->
  5892. </a>
  5893. <ul class="blockList">
  5894. <li class="blockList">
  5895. <h4>calibrateHandEye</h4>
  5896. <pre>public static&nbsp;void&nbsp;calibrateHandEye(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_gripper2base,
  5897. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_gripper2base,
  5898. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_target2cam,
  5899. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_target2cam,
  5900. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R_cam2gripper,
  5901. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t_cam2gripper,
  5902. int&nbsp;method)</pre>
  5903. <div class="block">Computes Hand-Eye calibration: \(_{}^{g}\textrm{T}_c\)</div>
  5904. <dl>
  5905. <dt><span class="paramLabel">Parameters:</span></dt>
  5906. <dd><code>R_gripper2base</code> - Rotation part extracted from the homogeneous matrix that transforms a point
  5907. expressed in the gripper frame to the robot base frame (\(_{}^{b}\textrm{T}_g\)).
This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the rotations, as <code>(3x3)</code> rotation matrices or <code>(3x1)</code> rotation vectors,
  5909. for all the transformations from gripper frame to robot base frame.</dd>
  5910. <dd><code>t_gripper2base</code> - Translation part extracted from the homogeneous matrix that transforms a point
  5911. expressed in the gripper frame to the robot base frame (\(_{}^{b}\textrm{T}_g\)).
  5912. This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the <code>(3x1)</code> translation vectors for all the transformations
  5913. from gripper frame to robot base frame.</dd>
  5914. <dd><code>R_target2cam</code> - Rotation part extracted from the homogeneous matrix that transforms a point
  5915. expressed in the target frame to the camera frame (\(_{}^{c}\textrm{T}_t\)).
This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the rotations, as <code>(3x3)</code> rotation matrices or <code>(3x1)</code> rotation vectors,
  5917. for all the transformations from calibration target frame to camera frame.</dd>
<dd><code>t_target2cam</code> - Translation part extracted from the homogeneous matrix that transforms a point
  5919. expressed in the target frame to the camera frame (\(_{}^{c}\textrm{T}_t\)).
  5920. This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the <code>(3x1)</code> translation vectors for all the transformations
  5921. from calibration target frame to camera frame.</dd>
  5922. <dd><code>R_cam2gripper</code> - Estimated <code>(3x3)</code> rotation part extracted from the homogeneous matrix that transforms a point
  5923. expressed in the camera frame to the gripper frame (\(_{}^{g}\textrm{T}_c\)).</dd>
  5924. <dd><code>t_cam2gripper</code> - Estimated <code>(3x1)</code> translation part extracted from the homogeneous matrix that transforms a point
  5925. expressed in the camera frame to the gripper frame (\(_{}^{g}\textrm{T}_c\)).</dd>
<dd><code>method</code> - One of the implemented Hand-Eye calibration methods, see cv::HandEyeCalibrationMethod.
The function performs the Hand-Eye calibration using various methods. One approach consists of estimating the
rotation and then the translation (separable solutions); the following methods are implemented:
  5929. <ul>
  5930. <li>
  5931. R. Tsai, R. Lenz A New Technique for Fully Autonomous and Efficient 3D Robotics Hand/EyeCalibration \cite Tsai89
  5932. </li>
  5933. <li>
  5934. F. Park, B. Martin Robot Sensor Calibration: Solving AX = XB on the Euclidean Group \cite Park94
  5935. </li>
  5936. <li>
  5937. R. Horaud, F. Dornaika Hand-Eye Calibration \cite Horaud95
  5938. </li>
  5939. </ul>
Another approach consists of estimating the rotation and the translation simultaneously (simultaneous solutions),
with the following implemented methods:
  5942. <ul>
  5943. <li>
  5944. N. Andreff, R. Horaud, B. Espiau On-line Hand-Eye Calibration \cite Andreff99
  5945. </li>
  5946. <li>
  5947. K. Daniilidis Hand-Eye Calibration Using Dual Quaternions \cite Daniilidis98
  5948. </li>
  5949. </ul>
The following picture describes the Hand-Eye calibration problem, where the transformation between a camera ("eye")
mounted on a robot gripper ("hand") and the gripper has to be estimated. This configuration is called eye-in-hand.
The eye-to-hand configuration consists of a static camera observing a calibration pattern mounted on the robot
  5953. end-effector. The transformation from the camera to the robot base frame can then be estimated by inputting
  5954. the suitable transformations to the function, see below.
(Figure: pics/hand-eye_figure.png)
  5956. The calibration procedure is the following:
  5957. <ul>
  5958. <li>
  5959. a static calibration pattern is used to estimate the transformation between the target frame
  5960. and the camera frame
  5961. </li>
  5962. <li>
  5963. the robot gripper is moved in order to acquire several poses
  5964. </li>
  5965. <li>
  5966. for each pose, the homogeneous transformation between the gripper frame and the robot base frame is recorded using for
  5967. instance the robot kinematics
  5968. \(
  5969. \begin{bmatrix}
  5970. X_b\\
  5971. Y_b\\
  5972. Z_b\\
  5973. 1
  5974. \end{bmatrix}
  5975. =
  5976. \begin{bmatrix}
  5977. _{}^{b}\textrm{R}_g &amp; _{}^{b}\textrm{t}_g \\
  5978. 0_{1 \times 3} &amp; 1
  5979. \end{bmatrix}
  5980. \begin{bmatrix}
  5981. X_g\\
  5982. Y_g\\
  5983. Z_g\\
  5984. 1
  5985. \end{bmatrix}
  5986. \)
  5987. </li>
  5988. <li>
  5989. for each pose, the homogeneous transformation between the calibration target frame and the camera frame is recorded using
  5990. for instance a pose estimation method (PnP) from 2D-3D point correspondences
  5991. \(
  5992. \begin{bmatrix}
  5993. X_c\\
  5994. Y_c\\
  5995. Z_c\\
  5996. 1
  5997. \end{bmatrix}
  5998. =
  5999. \begin{bmatrix}
  6000. _{}^{c}\textrm{R}_t &amp; _{}^{c}\textrm{t}_t \\
  6001. 0_{1 \times 3} &amp; 1
  6002. \end{bmatrix}
  6003. \begin{bmatrix}
  6004. X_t\\
  6005. Y_t\\
  6006. Z_t\\
  6007. 1
  6008. \end{bmatrix}
  6009. \)
  6010. </li>
  6011. </ul>
  6012. The Hand-Eye calibration procedure returns the following homogeneous transformation
  6013. \(
  6014. \begin{bmatrix}
  6015. X_g\\
  6016. Y_g\\
  6017. Z_g\\
  6018. 1
  6019. \end{bmatrix}
  6020. =
  6021. \begin{bmatrix}
  6022. _{}^{g}\textrm{R}_c &amp; _{}^{g}\textrm{t}_c \\
  6023. 0_{1 \times 3} &amp; 1
  6024. \end{bmatrix}
  6025. \begin{bmatrix}
  6026. X_c\\
  6027. Y_c\\
  6028. Z_c\\
  6029. 1
  6030. \end{bmatrix}
  6031. \)
  6032. This problem is also known as solving the \(\mathbf{A}\mathbf{X}=\mathbf{X}\mathbf{B}\) equation:
<ul>
<li>
for an eye-in-hand configuration
\(
\begin{align*}
^{b}{\textrm{T}_g}^{(1)} \hspace{0.2em} ^{g}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(1)} &amp;=
\hspace{0.1em} ^{b}{\textrm{T}_g}^{(2)} \hspace{0.2em} ^{g}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(2)} \\
(^{b}{\textrm{T}_g}^{(2)})^{-1} \hspace{0.2em} ^{b}{\textrm{T}_g}^{(1)} \hspace{0.2em} ^{g}\textrm{T}_c &amp;=
\hspace{0.1em} ^{g}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(2)} (^{c}{\textrm{T}_t}^{(1)})^{-1} \\
\textrm{A}_i \textrm{X} &amp;= \textrm{X} \textrm{B}_i \\
\end{align*}
\)
</li>
<li>
for an eye-to-hand configuration
\(
\begin{align*}
^{g}{\textrm{T}_b}^{(1)} \hspace{0.2em} ^{b}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(1)} &amp;=
\hspace{0.1em} ^{g}{\textrm{T}_b}^{(2)} \hspace{0.2em} ^{b}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(2)} \\
(^{g}{\textrm{T}_b}^{(2)})^{-1} \hspace{0.2em} ^{g}{\textrm{T}_b}^{(1)} \hspace{0.2em} ^{b}\textrm{T}_c &amp;=
\hspace{0.1em} ^{b}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(2)} (^{c}{\textrm{T}_t}^{(1)})^{-1} \\
\textrm{A}_i \textrm{X} &amp;= \textrm{X} \textrm{B}_i \\
\end{align*}
\)
</li>
</ul>
<b>Note:</b>
Additional information can be found on this <a href="http://campar.in.tum.de/Chair/HandEyeCalibration">website</a>.
<b>Note:</b>
A minimum of 2 motions with non-parallel rotation axes is necessary to determine the hand-eye transformation.
So at least 3 different poses are required, but it is strongly recommended to use many more poses.</dd>
  6066. </dl>
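<div class="block">The <code>method</code> argument selects one of the algorithms listed above via the HandEyeCalibrationMethod constants; a hedged illustration, reusing the names from the sketch above:</div>
<pre>
// Illustration only: use Daniilidis' dual-quaternion method instead of the default Tsai method.
Calib3d.calibrateHandEye(R_gripper2base, t_gripper2base, R_target2cam, t_target2cam,
                         R_cam2gripper, t_cam2gripper, Calib3d.CALIB_HAND_EYE_DANIILIDIS);
</pre>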
  6067. </li>
  6068. </ul>
  6069. <a name="calibrateRobotWorldHandEye-java.util.List-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  6070. <!-- -->
  6071. </a>
  6072. <ul class="blockList">
  6073. <li class="blockList">
  6074. <h4>calibrateRobotWorldHandEye</h4>
  6075. <pre>public static&nbsp;void&nbsp;calibrateRobotWorldHandEye(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_world2cam,
  6076. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_world2cam,
  6077. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_base2gripper,
  6078. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_base2gripper,
  6079. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R_base2world,
  6080. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t_base2world,
  6081. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R_gripper2cam,
  6082. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t_gripper2cam)</pre>
  6083. <div class="block">Computes Robot-World/Hand-Eye calibration: \(_{}^{w}\textrm{T}_b\) and \(_{}^{c}\textrm{T}_g\)</div>
  6084. <dl>
  6085. <dt><span class="paramLabel">Parameters:</span></dt>
  6086. <dd><code>R_world2cam</code> - Rotation part extracted from the homogeneous matrix that transforms a point
  6087. expressed in the world frame to the camera frame (\(_{}^{c}\textrm{T}_w\)).
This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the rotations, as <code>(3x3)</code> rotation matrices or <code>(3x1)</code> rotation vectors,
  6089. for all the transformations from world frame to the camera frame.</dd>
  6090. <dd><code>t_world2cam</code> - Translation part extracted from the homogeneous matrix that transforms a point
  6091. expressed in the world frame to the camera frame (\(_{}^{c}\textrm{T}_w\)).
  6092. This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the <code>(3x1)</code> translation vectors for all the transformations
  6093. from world frame to the camera frame.</dd>
  6094. <dd><code>R_base2gripper</code> - Rotation part extracted from the homogeneous matrix that transforms a point
  6095. expressed in the robot base frame to the gripper frame (\(_{}^{g}\textrm{T}_b\)).
This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the rotations, as <code>(3x3)</code> rotation matrices or <code>(3x1)</code> rotation vectors,
  6097. for all the transformations from robot base frame to the gripper frame.</dd>
<dd><code>t_base2gripper</code> - Translation part extracted from the homogeneous matrix that transforms a point
  6099. expressed in the robot base frame to the gripper frame (\(_{}^{g}\textrm{T}_b\)).
  6100. This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the <code>(3x1)</code> translation vectors for all the transformations
  6101. from robot base frame to the gripper frame.</dd>
  6102. <dd><code>R_base2world</code> - Estimated <code>(3x3)</code> rotation part extracted from the homogeneous matrix that transforms a point
  6103. expressed in the robot base frame to the world frame (\(_{}^{w}\textrm{T}_b\)).</dd>
  6104. <dd><code>t_base2world</code> - Estimated <code>(3x1)</code> translation part extracted from the homogeneous matrix that transforms a point
  6105. expressed in the robot base frame to the world frame (\(_{}^{w}\textrm{T}_b\)).</dd>
  6106. <dd><code>R_gripper2cam</code> - Estimated <code>(3x3)</code> rotation part extracted from the homogeneous matrix that transforms a point
  6107. expressed in the gripper frame to the camera frame (\(_{}^{c}\textrm{T}_g\)).</dd>
  6108. <dd><code>t_gripper2cam</code> - Estimated <code>(3x1)</code> translation part extracted from the homogeneous matrix that transforms a point
  6109. expressed in the gripper frame to the camera frame (\(_{}^{c}\textrm{T}_g\)).
The function performs the Robot-World/Hand-Eye calibration using various methods. One approach consists of estimating the
rotation and then the translation (separable solutions):
  6112. <ul>
  6113. <li>
  6114. M. Shah, Solving the robot-world/hand-eye calibration problem using the kronecker product \cite Shah2013SolvingTR
  6115. </li>
  6116. </ul>
Another approach consists of estimating the rotation and the translation simultaneously (simultaneous solutions),
with the following implemented method:
  6119. <ul>
  6120. <li>
  6121. A. Li, L. Wang, and D. Wu, Simultaneous robot-world and hand-eye calibration using dual-quaternions and kronecker product \cite Li2010SimultaneousRA
  6122. </li>
  6123. </ul>
The following picture describes the Robot-World/Hand-Eye calibration problem, where the transformations between the robot base frame and the world frame
and between a robot gripper ("hand") and a camera ("eye") mounted on the robot end-effector have to be estimated.
(Figure: pics/robot-world_hand-eye_figure.png)
  6127. The calibration procedure is the following:
  6128. <ul>
  6129. <li>
  6130. a static calibration pattern is used to estimate the transformation between the target frame
  6131. and the camera frame
  6132. </li>
  6133. <li>
  6134. the robot gripper is moved in order to acquire several poses
  6135. </li>
  6136. <li>
  6137. for each pose, the homogeneous transformation between the gripper frame and the robot base frame is recorded using for
  6138. instance the robot kinematics
  6139. \(
  6140. \begin{bmatrix}
  6141. X_g\\
  6142. Y_g\\
  6143. Z_g\\
  6144. 1
  6145. \end{bmatrix}
  6146. =
  6147. \begin{bmatrix}
  6148. _{}^{g}\textrm{R}_b &amp; _{}^{g}\textrm{t}_b \\
  6149. 0_{1 \times 3} &amp; 1
  6150. \end{bmatrix}
  6151. \begin{bmatrix}
  6152. X_b\\
  6153. Y_b\\
  6154. Z_b\\
  6155. 1
  6156. \end{bmatrix}
  6157. \)
  6158. </li>
  6159. <li>
  6160. for each pose, the homogeneous transformation between the calibration target frame (the world frame) and the camera frame is recorded using
  6161. for instance a pose estimation method (PnP) from 2D-3D point correspondences
  6162. \(
  6163. \begin{bmatrix}
  6164. X_c\\
  6165. Y_c\\
  6166. Z_c\\
  6167. 1
  6168. \end{bmatrix}
  6169. =
  6170. \begin{bmatrix}
  6171. _{}^{c}\textrm{R}_w &amp; _{}^{c}\textrm{t}_w \\
  6172. 0_{1 \times 3} &amp; 1
  6173. \end{bmatrix}
  6174. \begin{bmatrix}
  6175. X_w\\
  6176. Y_w\\
  6177. Z_w\\
  6178. 1
  6179. \end{bmatrix}
  6180. \)
  6181. </li>
  6182. </ul>
  6183. The Robot-World/Hand-Eye calibration procedure returns the following homogeneous transformations
  6184. \(
  6185. \begin{bmatrix}
  6186. X_w\\
  6187. Y_w\\
  6188. Z_w\\
  6189. 1
  6190. \end{bmatrix}
  6191. =
  6192. \begin{bmatrix}
  6193. _{}^{w}\textrm{R}_b &amp; _{}^{w}\textrm{t}_b \\
  6194. 0_{1 \times 3} &amp; 1
  6195. \end{bmatrix}
  6196. \begin{bmatrix}
  6197. X_b\\
  6198. Y_b\\
  6199. Z_b\\
  6200. 1
  6201. \end{bmatrix}
  6202. \)
  6203. \(
  6204. \begin{bmatrix}
  6205. X_c\\
  6206. Y_c\\
  6207. Z_c\\
  6208. 1
  6209. \end{bmatrix}
  6210. =
  6211. \begin{bmatrix}
  6212. _{}^{c}\textrm{R}_g &amp; _{}^{c}\textrm{t}_g \\
  6213. 0_{1 \times 3} &amp; 1
  6214. \end{bmatrix}
  6215. \begin{bmatrix}
  6216. X_g\\
  6217. Y_g\\
  6218. Z_g\\
  6219. 1
  6220. \end{bmatrix}
  6221. \)
  6222. This problem is also known as solving the \(\mathbf{A}\mathbf{X}=\mathbf{Z}\mathbf{B}\) equation, with:
  6223. <ul>
  6224. <li>
  6225. \(\mathbf{A} \Leftrightarrow \hspace{0.1em} _{}^{c}\textrm{T}_w\)
  6226. </li>
  6227. <li>
  6228. \(\mathbf{X} \Leftrightarrow \hspace{0.1em} _{}^{w}\textrm{T}_b\)
  6229. </li>
  6230. <li>
  6231. \(\mathbf{Z} \Leftrightarrow \hspace{0.1em} _{}^{c}\textrm{T}_g\)
  6232. </li>
  6233. <li>
  6234. \(\mathbf{B} \Leftrightarrow \hspace{0.1em} _{}^{g}\textrm{T}_b\)
  6235. </li>
  6236. </ul>
<b>Note:</b>
At least 3 measurements are required (the size of the input vectors must be greater than or equal to 3).</dd>
  6239. </dl>
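<div class="block">A minimal, hedged usage sketch (assuming the per-pose world-to-camera motions come from e.g. solvePnP and the base-to-gripper motions from the robot kinematics):</div>
<pre>
import java.util.List;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;

public class RobotWorldHandEyeSketch {
    public static void calibrate(List&lt;Mat&gt; R_world2cam, List&lt;Mat&gt; t_world2cam,
                                 List&lt;Mat&gt; R_base2gripper, List&lt;Mat&gt; t_base2gripper) {
        Mat R_base2world = new Mat();   // estimated (3x3) rotation, base -&gt; world
        Mat t_base2world = new Mat();   // estimated (3x1) translation, base -&gt; world
        Mat R_gripper2cam = new Mat();  // estimated (3x3) rotation, gripper -&gt; camera
        Mat t_gripper2cam = new Mat();  // estimated (3x1) translation, gripper -&gt; camera
        Calib3d.calibrateRobotWorldHandEye(R_world2cam, t_world2cam,
                                           R_base2gripper, t_base2gripper,
                                           R_base2world, t_base2world,
                                           R_gripper2cam, t_gripper2cam);
        // R_base2world/t_base2world and R_gripper2cam/t_gripper2cam now hold the
        // two estimated transformations described above.
    }
}
</pre>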
  6240. </li>
  6241. </ul>
  6242. <a name="calibrateRobotWorldHandEye-java.util.List-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">
  6243. <!-- -->
  6244. </a>
  6245. <ul class="blockList">
  6246. <li class="blockList">
  6247. <h4>calibrateRobotWorldHandEye</h4>
  6248. <pre>public static&nbsp;void&nbsp;calibrateRobotWorldHandEye(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_world2cam,
  6249. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_world2cam,
  6250. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;R_base2gripper,
  6251. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;t_base2gripper,
  6252. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R_base2world,
  6253. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t_base2world,
  6254. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R_gripper2cam,
  6255. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t_gripper2cam,
  6256. int&nbsp;method)</pre>
  6257. <div class="block">Computes Robot-World/Hand-Eye calibration: \(_{}^{w}\textrm{T}_b\) and \(_{}^{c}\textrm{T}_g\)</div>
  6258. <dl>
  6259. <dt><span class="paramLabel">Parameters:</span></dt>
  6260. <dd><code>R_world2cam</code> - Rotation part extracted from the homogeneous matrix that transforms a point
  6261. expressed in the world frame to the camera frame (\(_{}^{c}\textrm{T}_w\)).
This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the rotations, as <code>(3x3)</code> rotation matrices or <code>(3x1)</code> rotation vectors,
  6263. for all the transformations from world frame to the camera frame.</dd>
  6264. <dd><code>t_world2cam</code> - Translation part extracted from the homogeneous matrix that transforms a point
  6265. expressed in the world frame to the camera frame (\(_{}^{c}\textrm{T}_w\)).
  6266. This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the <code>(3x1)</code> translation vectors for all the transformations
  6267. from world frame to the camera frame.</dd>
  6268. <dd><code>R_base2gripper</code> - Rotation part extracted from the homogeneous matrix that transforms a point
  6269. expressed in the robot base frame to the gripper frame (\(_{}^{g}\textrm{T}_b\)).
This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the rotations, as <code>(3x3)</code> rotation matrices or <code>(3x1)</code> rotation vectors,
  6271. for all the transformations from robot base frame to the gripper frame.</dd>
<dd><code>t_base2gripper</code> - Translation part extracted from the homogeneous matrix that transforms a point
  6273. expressed in the robot base frame to the gripper frame (\(_{}^{g}\textrm{T}_b\)).
  6274. This is a vector (<code>vector&amp;lt;Mat&amp;gt;</code>) that contains the <code>(3x1)</code> translation vectors for all the transformations
  6275. from robot base frame to the gripper frame.</dd>
  6276. <dd><code>R_base2world</code> - Estimated <code>(3x3)</code> rotation part extracted from the homogeneous matrix that transforms a point
  6277. expressed in the robot base frame to the world frame (\(_{}^{w}\textrm{T}_b\)).</dd>
  6278. <dd><code>t_base2world</code> - Estimated <code>(3x1)</code> translation part extracted from the homogeneous matrix that transforms a point
  6279. expressed in the robot base frame to the world frame (\(_{}^{w}\textrm{T}_b\)).</dd>
  6280. <dd><code>R_gripper2cam</code> - Estimated <code>(3x3)</code> rotation part extracted from the homogeneous matrix that transforms a point
  6281. expressed in the gripper frame to the camera frame (\(_{}^{c}\textrm{T}_g\)).</dd>
  6282. <dd><code>t_gripper2cam</code> - Estimated <code>(3x1)</code> translation part extracted from the homogeneous matrix that transforms a point
  6283. expressed in the gripper frame to the camera frame (\(_{}^{c}\textrm{T}_g\)).</dd>
<dd><code>method</code> - One of the implemented Robot-World/Hand-Eye calibration methods, see cv::RobotWorldHandEyeCalibrationMethod.
The function performs the Robot-World/Hand-Eye calibration using various methods. One approach consists of estimating the
rotation and then the translation (separable solutions):
  6287. <ul>
  6288. <li>
  6289. M. Shah, Solving the robot-world/hand-eye calibration problem using the kronecker product \cite Shah2013SolvingTR
  6290. </li>
  6291. </ul>
Another approach consists of estimating the rotation and the translation simultaneously (simultaneous solutions),
with the following implemented method:
  6294. <ul>
  6295. <li>
  6296. A. Li, L. Wang, and D. Wu, Simultaneous robot-world and hand-eye calibration using dual-quaternions and kronecker product \cite Li2010SimultaneousRA
  6297. </li>
  6298. </ul>
The following picture describes the Robot-World/Hand-Eye calibration problem, where the transformations between the robot base frame and the world frame
and between a robot gripper ("hand") and a camera ("eye") mounted on the robot end-effector have to be estimated.
(Figure: pics/robot-world_hand-eye_figure.png)
  6302. The calibration procedure is the following:
  6303. <ul>
  6304. <li>
  6305. a static calibration pattern is used to estimate the transformation between the target frame
  6306. and the camera frame
  6307. </li>
  6308. <li>
  6309. the robot gripper is moved in order to acquire several poses
  6310. </li>
  6311. <li>
  6312. for each pose, the homogeneous transformation between the gripper frame and the robot base frame is recorded using for
  6313. instance the robot kinematics
  6314. \(
  6315. \begin{bmatrix}
  6316. X_g\\
  6317. Y_g\\
  6318. Z_g\\
  6319. 1
  6320. \end{bmatrix}
  6321. =
  6322. \begin{bmatrix}
  6323. _{}^{g}\textrm{R}_b &amp; _{}^{g}\textrm{t}_b \\
  6324. 0_{1 \times 3} &amp; 1
  6325. \end{bmatrix}
  6326. \begin{bmatrix}
  6327. X_b\\
  6328. Y_b\\
  6329. Z_b\\
  6330. 1
  6331. \end{bmatrix}
  6332. \)
  6333. </li>
  6334. <li>
  6335. for each pose, the homogeneous transformation between the calibration target frame (the world frame) and the camera frame is recorded using
  6336. for instance a pose estimation method (PnP) from 2D-3D point correspondences
  6337. \(
  6338. \begin{bmatrix}
  6339. X_c\\
  6340. Y_c\\
  6341. Z_c\\
  6342. 1
  6343. \end{bmatrix}
  6344. =
  6345. \begin{bmatrix}
  6346. _{}^{c}\textrm{R}_w &amp; _{}^{c}\textrm{t}_w \\
  6347. 0_{1 \times 3} &amp; 1
  6348. \end{bmatrix}
  6349. \begin{bmatrix}
  6350. X_w\\
  6351. Y_w\\
  6352. Z_w\\
  6353. 1
  6354. \end{bmatrix}
  6355. \)
  6356. </li>
  6357. </ul>
  6358. The Robot-World/Hand-Eye calibration procedure returns the following homogeneous transformations
  6359. \(
  6360. \begin{bmatrix}
  6361. X_w\\
  6362. Y_w\\
  6363. Z_w\\
  6364. 1
  6365. \end{bmatrix}
  6366. =
  6367. \begin{bmatrix}
  6368. _{}^{w}\textrm{R}_b &amp; _{}^{w}\textrm{t}_b \\
  6369. 0_{1 \times 3} &amp; 1
  6370. \end{bmatrix}
  6371. \begin{bmatrix}
  6372. X_b\\
  6373. Y_b\\
  6374. Z_b\\
  6375. 1
  6376. \end{bmatrix}
  6377. \)
  6378. \(
  6379. \begin{bmatrix}
  6380. X_c\\
  6381. Y_c\\
  6382. Z_c\\
  6383. 1
  6384. \end{bmatrix}
  6385. =
  6386. \begin{bmatrix}
  6387. _{}^{c}\textrm{R}_g &amp; _{}^{c}\textrm{t}_g \\
  6388. 0_{1 \times 3} &amp; 1
  6389. \end{bmatrix}
  6390. \begin{bmatrix}
  6391. X_g\\
  6392. Y_g\\
  6393. Z_g\\
  6394. 1
  6395. \end{bmatrix}
  6396. \)
  6397. This problem is also known as solving the \(\mathbf{A}\mathbf{X}=\mathbf{Z}\mathbf{B}\) equation, with:
  6398. <ul>
  6399. <li>
  6400. \(\mathbf{A} \Leftrightarrow \hspace{0.1em} _{}^{c}\textrm{T}_w\)
  6401. </li>
  6402. <li>
  6403. \(\mathbf{X} \Leftrightarrow \hspace{0.1em} _{}^{w}\textrm{T}_b\)
  6404. </li>
  6405. <li>
  6406. \(\mathbf{Z} \Leftrightarrow \hspace{0.1em} _{}^{c}\textrm{T}_g\)
  6407. </li>
  6408. <li>
  6409. \(\mathbf{B} \Leftrightarrow \hspace{0.1em} _{}^{g}\textrm{T}_b\)
  6410. </li>
  6411. </ul>
<b>Note:</b>
At least 3 measurements are required (the size of the input vectors must be greater than or equal to 3).</dd>
  6414. </dl>
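<div class="block">The <code>method</code> argument selects one of the RobotWorldHandEyeCalibrationMethod constants; a hedged illustration, reusing the names from the sketch above:</div>
<pre>
// Illustration only: explicitly select Shah's separable (Kronecker product based) solution.
Calib3d.calibrateRobotWorldHandEye(R_world2cam, t_world2cam, R_base2gripper, t_base2gripper,
        R_base2world, t_base2world, R_gripper2cam, t_gripper2cam,
        Calib3d.CALIB_ROBOT_WORLD_HAND_EYE_SHAH);
</pre>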
  6415. </li>
  6416. </ul>
  6417. <a name="calibrationMatrixValues-org.opencv.core.Mat-org.opencv.core.Size-double-double-double:A-double:A-double:A-org.opencv.core.Point-double:A-">
  6418. <!-- -->
  6419. </a>
  6420. <ul class="blockList">
  6421. <li class="blockList">
  6422. <h4>calibrationMatrixValues</h4>
  6423. <pre>public static&nbsp;void&nbsp;calibrationMatrixValues(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  6424. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  6425. double&nbsp;apertureWidth,
  6426. double&nbsp;apertureHeight,
  6427. double[]&nbsp;fovx,
  6428. double[]&nbsp;fovy,
  6429. double[]&nbsp;focalLength,
  6430. <a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;principalPoint,
  6431. double[]&nbsp;aspectRatio)</pre>
  6432. <div class="block">Computes useful camera characteristics from the camera intrinsic matrix.</div>
  6433. <dl>
  6434. <dt><span class="paramLabel">Parameters:</span></dt>
  6435. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix that can be estimated by #calibrateCamera or
  6436. #stereoCalibrate .</dd>
  6437. <dd><code>imageSize</code> - Input image size in pixels.</dd>
  6438. <dd><code>apertureWidth</code> - Physical width in mm of the sensor.</dd>
  6439. <dd><code>apertureHeight</code> - Physical height in mm of the sensor.</dd>
  6440. <dd><code>fovx</code> - Output field of view in degrees along the horizontal sensor axis.</dd>
  6441. <dd><code>fovy</code> - Output field of view in degrees along the vertical sensor axis.</dd>
  6442. <dd><code>focalLength</code> - Focal length of the lens in mm.</dd>
  6443. <dd><code>principalPoint</code> - Principal point in mm.</dd>
<dd><code>aspectRatio</code> - Output aspect ratio \(f_y/f_x\).
The function computes various useful camera characteristics from the previously estimated camera
matrix.
<b>Note:</b>
Keep in mind that the unit 'mm' stands for whatever unit of measure one chooses for
the chessboard pitch (it can thus be any value).</dd>
  6450. </dl>
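<div class="block">A hedged snippet showing how the outputs are retrieved through the single-element arrays (assuming <code>cameraMatrix</code> and <code>imageSize</code> come from a previous #calibrateCamera run; the 5.76 x 4.29 mm aperture is only an illustrative sensor size):</div>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Point;

double[] fovx = new double[1];
double[] fovy = new double[1];
double[] focalLength = new double[1];
double[] aspectRatio = new double[1];
Point principalPoint = new Point();

Calib3d.calibrationMatrixValues(cameraMatrix, imageSize, 5.76, 4.29,
        fovx, fovy, focalLength, principalPoint, aspectRatio);
System.out.println("FOV: " + fovx[0] + " x " + fovy[0] + " degrees, f = "
        + focalLength[0] + " mm");
</pre>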
  6451. </li>
  6452. </ul>
  6453. <a name="checkChessboard-org.opencv.core.Mat-org.opencv.core.Size-">
  6454. <!-- -->
  6455. </a>
  6456. <ul class="blockList">
  6457. <li class="blockList">
  6458. <h4>checkChessboard</h4>
  6459. <pre>public static&nbsp;boolean&nbsp;checkChessboard(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;img,
  6460. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;size)</pre>
  6461. </li>
  6462. </ul>
  6463. <a name="composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  6464. <!-- -->
  6465. </a>
  6466. <ul class="blockList">
  6467. <li class="blockList">
  6468. <h4>composeRT</h4>
  6469. <pre>public static&nbsp;void&nbsp;composeRT(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  6470. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  6471. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  6472. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  6473. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  6474. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3)</pre>
  6475. <div class="block">Combines two rotation-and-shift transformations.</div>
  6476. <dl>
  6477. <dt><span class="paramLabel">Parameters:</span></dt>
  6478. <dd><code>rvec1</code> - First rotation vector.</dd>
  6479. <dd><code>tvec1</code> - First translation vector.</dd>
  6480. <dd><code>rvec2</code> - Second rotation vector.</dd>
  6481. <dd><code>tvec2</code> - Second translation vector.</dd>
  6482. <dd><code>rvec3</code> - Output rotation vector of the superposition.</dd>
  6483. <dd><code>tvec3</code> - Output translation vector of the superposition.
  6484. The functions compute:
  6485. \(\begin{array}{l} \texttt{rvec3} = \mathrm{rodrigues} ^{-1} \left ( \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \mathrm{rodrigues} ( \texttt{rvec1} ) \right ) \\ \texttt{tvec3} = \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \texttt{tvec1} + \texttt{tvec2} \end{array} ,\)
  6486. where \(\mathrm{rodrigues}\) denotes a rotation vector to a rotation matrix transformation, and
  6487. \(\mathrm{rodrigues}^{-1}\) denotes the inverse transformation. See #Rodrigues for details.
Also, the functions can compute the derivatives of the output vectors with regard to the input
  6489. vectors (see #matMulDeriv ). The functions are used inside #stereoCalibrate but can also be used in
  6490. your own code where Levenberg-Marquardt or another gradient-based solver is used to optimize a
  6491. function that contains a matrix multiplication.</dd>
  6492. </dl>
  6493. </li>
  6494. </ul>
  6495. <a name="composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  6496. <!-- -->
  6497. </a>
  6498. <ul class="blockList">
  6499. <li class="blockList">
  6500. <h4>composeRT</h4>
  6501. <pre>public static&nbsp;void&nbsp;composeRT(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  6502. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  6503. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  6504. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  6505. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  6506. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  6507. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1)</pre>
  6508. <div class="block">Combines two rotation-and-shift transformations.</div>
  6509. <dl>
  6510. <dt><span class="paramLabel">Parameters:</span></dt>
  6511. <dd><code>rvec1</code> - First rotation vector.</dd>
  6512. <dd><code>tvec1</code> - First translation vector.</dd>
  6513. <dd><code>rvec2</code> - Second rotation vector.</dd>
  6514. <dd><code>tvec2</code> - Second translation vector.</dd>
  6515. <dd><code>rvec3</code> - Output rotation vector of the superposition.</dd>
  6516. <dd><code>tvec3</code> - Output translation vector of the superposition.</dd>
  6517. <dd><code>dr3dr1</code> - Optional output derivative of rvec3 with regard to rvec1
  6518. The functions compute:
  6519. \(\begin{array}{l} \texttt{rvec3} = \mathrm{rodrigues} ^{-1} \left ( \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \mathrm{rodrigues} ( \texttt{rvec1} ) \right ) \\ \texttt{tvec3} = \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \texttt{tvec1} + \texttt{tvec2} \end{array} ,\)
  6520. where \(\mathrm{rodrigues}\) denotes a rotation vector to a rotation matrix transformation, and
  6521. \(\mathrm{rodrigues}^{-1}\) denotes the inverse transformation. See #Rodrigues for details.
Also, the functions can compute the derivatives of the output vectors with regard to the input
  6523. vectors (see #matMulDeriv ). The functions are used inside #stereoCalibrate but can also be used in
  6524. your own code where Levenberg-Marquardt or another gradient-based solver is used to optimize a
  6525. function that contains a matrix multiplication.</dd>
  6526. </dl>
  6527. </li>
  6528. </ul>
  6529. <a name="composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  6530. <!-- -->
  6531. </a>
  6532. <ul class="blockList">
  6533. <li class="blockList">
  6534. <h4>composeRT</h4>
  6535. <pre>public static&nbsp;void&nbsp;composeRT(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  6536. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  6537. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  6538. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  6539. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  6540. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  6541. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1,
  6542. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt1)</pre>
  6543. <div class="block">Combines two rotation-and-shift transformations.</div>
  6544. <dl>
  6545. <dt><span class="paramLabel">Parameters:</span></dt>
  6546. <dd><code>rvec1</code> - First rotation vector.</dd>
  6547. <dd><code>tvec1</code> - First translation vector.</dd>
  6548. <dd><code>rvec2</code> - Second rotation vector.</dd>
  6549. <dd><code>tvec2</code> - Second translation vector.</dd>
  6550. <dd><code>rvec3</code> - Output rotation vector of the superposition.</dd>
  6551. <dd><code>tvec3</code> - Output translation vector of the superposition.</dd>
  6552. <dd><code>dr3dr1</code> - Optional output derivative of rvec3 with regard to rvec1</dd>
  6553. <dd><code>dr3dt1</code> - Optional output derivative of rvec3 with regard to tvec1
  6554. The functions compute:
  6555. \(\begin{array}{l} \texttt{rvec3} = \mathrm{rodrigues} ^{-1} \left ( \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \mathrm{rodrigues} ( \texttt{rvec1} ) \right ) \\ \texttt{tvec3} = \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \texttt{tvec1} + \texttt{tvec2} \end{array} ,\)
  6556. where \(\mathrm{rodrigues}\) denotes a rotation vector to a rotation matrix transformation, and
  6557. \(\mathrm{rodrigues}^{-1}\) denotes the inverse transformation. See #Rodrigues for details.
Also, the functions can compute the derivatives of the output vectors with regard to the input
  6559. vectors (see #matMulDeriv ). The functions are used inside #stereoCalibrate but can also be used in
  6560. your own code where Levenberg-Marquardt or another gradient-based solver is used to optimize a
  6561. function that contains a matrix multiplication.</dd>
  6562. </dl>
  6563. </li>
  6564. </ul>
  6565. <a name="composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  6566. <!-- -->
  6567. </a>
  6568. <ul class="blockList">
  6569. <li class="blockList">
  6570. <h4>composeRT</h4>
  6571. <pre>public static&nbsp;void&nbsp;composeRT(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  6572. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  6573. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  6574. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  6575. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  6576. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  6577. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1,
  6578. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt1,
  6579. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr2)</pre>
  6580. <div class="block">Combines two rotation-and-shift transformations.</div>
  6581. <dl>
  6582. <dt><span class="paramLabel">Parameters:</span></dt>
  6583. <dd><code>rvec1</code> - First rotation vector.</dd>
  6584. <dd><code>tvec1</code> - First translation vector.</dd>
  6585. <dd><code>rvec2</code> - Second rotation vector.</dd>
  6586. <dd><code>tvec2</code> - Second translation vector.</dd>
  6587. <dd><code>rvec3</code> - Output rotation vector of the superposition.</dd>
  6588. <dd><code>tvec3</code> - Output translation vector of the superposition.</dd>
  6589. <dd><code>dr3dr1</code> - Optional output derivative of rvec3 with regard to rvec1</dd>
  6590. <dd><code>dr3dt1</code> - Optional output derivative of rvec3 with regard to tvec1</dd>
  6591. <dd><code>dr3dr2</code> - Optional output derivative of rvec3 with regard to rvec2
  6592. The functions compute:
  6593. \(\begin{array}{l} \texttt{rvec3} = \mathrm{rodrigues} ^{-1} \left ( \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \mathrm{rodrigues} ( \texttt{rvec1} ) \right ) \\ \texttt{tvec3} = \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \texttt{tvec1} + \texttt{tvec2} \end{array} ,\)
  6594. where \(\mathrm{rodrigues}\) denotes a rotation vector to a rotation matrix transformation, and
  6595. \(\mathrm{rodrigues}^{-1}\) denotes the inverse transformation. See #Rodrigues for details.
Also, the functions can compute the derivatives of the output vectors with regard to the input
  6597. vectors (see #matMulDeriv ). The functions are used inside #stereoCalibrate but can also be used in
  6598. your own code where Levenberg-Marquardt or another gradient-based solver is used to optimize a
  6599. function that contains a matrix multiplication.</dd>
  6600. </dl>
  6601. </li>
  6602. </ul>
  6603. <a name="composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  6604. <!-- -->
  6605. </a>
  6606. <ul class="blockList">
  6607. <li class="blockList">
  6608. <h4>composeRT</h4>
  6609. <pre>public static&nbsp;void&nbsp;composeRT(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  6610. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  6611. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  6612. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  6613. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  6614. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  6615. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1,
  6616. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt1,
  6617. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr2,
  6618. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt2)</pre>
  6619. <div class="block">Combines two rotation-and-shift transformations.</div>
  6620. <dl>
  6621. <dt><span class="paramLabel">Parameters:</span></dt>
  6622. <dd><code>rvec1</code> - First rotation vector.</dd>
  6623. <dd><code>tvec1</code> - First translation vector.</dd>
  6624. <dd><code>rvec2</code> - Second rotation vector.</dd>
  6625. <dd><code>tvec2</code> - Second translation vector.</dd>
  6626. <dd><code>rvec3</code> - Output rotation vector of the superposition.</dd>
  6627. <dd><code>tvec3</code> - Output translation vector of the superposition.</dd>
  6628. <dd><code>dr3dr1</code> - Optional output derivative of rvec3 with regard to rvec1</dd>
  6629. <dd><code>dr3dt1</code> - Optional output derivative of rvec3 with regard to tvec1</dd>
  6630. <dd><code>dr3dr2</code> - Optional output derivative of rvec3 with regard to rvec2</dd>
  6631. <dd><code>dr3dt2</code> - Optional output derivative of rvec3 with regard to tvec2
  6632. The functions compute:
  6633. \(\begin{array}{l} \texttt{rvec3} = \mathrm{rodrigues} ^{-1} \left ( \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \mathrm{rodrigues} ( \texttt{rvec1} ) \right ) \\ \texttt{tvec3} = \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \texttt{tvec1} + \texttt{tvec2} \end{array} ,\)
  6634. where \(\mathrm{rodrigues}\) denotes a rotation vector to a rotation matrix transformation, and
  6635. \(\mathrm{rodrigues}^{-1}\) denotes the inverse transformation. See #Rodrigues for details.
Also, the functions can compute the derivatives of the output vectors with regard to the input
  6637. vectors (see #matMulDeriv ). The functions are used inside #stereoCalibrate but can also be used in
  6638. your own code where Levenberg-Marquardt or another gradient-based solver is used to optimize a
  6639. function that contains a matrix multiplication.</dd>
  6640. </dl>
  6641. </li>
  6642. </ul>
  6643. <a name="composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  6644. <!-- -->
  6645. </a>
  6646. <ul class="blockList">
  6647. <li class="blockList">
  6648. <h4>composeRT</h4>
  6649. <pre>public static&nbsp;void&nbsp;composeRT(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  6650. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  6651. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  6652. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  6653. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  6654. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  6655. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1,
  6656. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt1,
  6657. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr2,
  6658. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt2,
  6659. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dr1)</pre>
  6660. <div class="block">Combines two rotation-and-shift transformations.</div>
  6661. <dl>
  6662. <dt><span class="paramLabel">Parameters:</span></dt>
  6663. <dd><code>rvec1</code> - First rotation vector.</dd>
  6664. <dd><code>tvec1</code> - First translation vector.</dd>
  6665. <dd><code>rvec2</code> - Second rotation vector.</dd>
  6666. <dd><code>tvec2</code> - Second translation vector.</dd>
  6667. <dd><code>rvec3</code> - Output rotation vector of the superposition.</dd>
  6668. <dd><code>tvec3</code> - Output translation vector of the superposition.</dd>
  6669. <dd><code>dr3dr1</code> - Optional output derivative of rvec3 with regard to rvec1</dd>
  6670. <dd><code>dr3dt1</code> - Optional output derivative of rvec3 with regard to tvec1</dd>
  6671. <dd><code>dr3dr2</code> - Optional output derivative of rvec3 with regard to rvec2</dd>
  6672. <dd><code>dr3dt2</code> - Optional output derivative of rvec3 with regard to tvec2</dd>
  6673. <dd><code>dt3dr1</code> - Optional output derivative of tvec3 with regard to rvec1
  6674. The functions compute:
  6675. \(\begin{array}{l} \texttt{rvec3} = \mathrm{rodrigues} ^{-1} \left ( \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \mathrm{rodrigues} ( \texttt{rvec1} ) \right ) \\ \texttt{tvec3} = \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \texttt{tvec1} + \texttt{tvec2} \end{array} ,\)
  6676. where \(\mathrm{rodrigues}\) denotes a rotation vector to a rotation matrix transformation, and
  6677. \(\mathrm{rodrigues}^{-1}\) denotes the inverse transformation. See #Rodrigues for details.
Also, the functions can compute the derivatives of the output vectors with regard to the input
  6679. vectors (see #matMulDeriv ). The functions are used inside #stereoCalibrate but can also be used in
  6680. your own code where Levenberg-Marquardt or another gradient-based solver is used to optimize a
  6681. function that contains a matrix multiplication.</dd>
  6682. </dl>
  6683. </li>
  6684. </ul>
  6685. <a name="composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  6686. <!-- -->
  6687. </a>
  6688. <ul class="blockList">
  6689. <li class="blockList">
  6690. <h4>composeRT</h4>
  6691. <pre>public static&nbsp;void&nbsp;composeRT(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  6692. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  6693. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  6694. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  6695. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  6696. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  6697. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1,
  6698. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt1,
  6699. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr2,
  6700. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt2,
  6701. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dr1,
  6702. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dt1)</pre>
  6703. <div class="block">Combines two rotation-and-shift transformations.</div>
  6704. <dl>
  6705. <dt><span class="paramLabel">Parameters:</span></dt>
  6706. <dd><code>rvec1</code> - First rotation vector.</dd>
  6707. <dd><code>tvec1</code> - First translation vector.</dd>
  6708. <dd><code>rvec2</code> - Second rotation vector.</dd>
  6709. <dd><code>tvec2</code> - Second translation vector.</dd>
  6710. <dd><code>rvec3</code> - Output rotation vector of the superposition.</dd>
  6711. <dd><code>tvec3</code> - Output translation vector of the superposition.</dd>
  6712. <dd><code>dr3dr1</code> - Optional output derivative of rvec3 with regard to rvec1</dd>
  6713. <dd><code>dr3dt1</code> - Optional output derivative of rvec3 with regard to tvec1</dd>
  6714. <dd><code>dr3dr2</code> - Optional output derivative of rvec3 with regard to rvec2</dd>
  6715. <dd><code>dr3dt2</code> - Optional output derivative of rvec3 with regard to tvec2</dd>
  6716. <dd><code>dt3dr1</code> - Optional output derivative of tvec3 with regard to rvec1</dd>
  6717. <dd><code>dt3dt1</code> - Optional output derivative of tvec3 with regard to tvec1
  6718. The functions compute:
  6719. \(\begin{array}{l} \texttt{rvec3} = \mathrm{rodrigues} ^{-1} \left ( \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \mathrm{rodrigues} ( \texttt{rvec1} ) \right ) \\ \texttt{tvec3} = \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \texttt{tvec1} + \texttt{tvec2} \end{array} ,\)
  6720. where \(\mathrm{rodrigues}\) denotes a rotation vector to a rotation matrix transformation, and
  6721. \(\mathrm{rodrigues}^{-1}\) denotes the inverse transformation. See #Rodrigues for details.
Also, the functions can compute the derivatives of the output vectors with regard to the input
  6723. vectors (see #matMulDeriv ). The functions are used inside #stereoCalibrate but can also be used in
  6724. your own code where Levenberg-Marquardt or another gradient-based solver is used to optimize a
  6725. function that contains a matrix multiplication.</dd>
  6726. </dl>
  6727. </li>
  6728. </ul>
  6729. <a name="composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  6730. <!-- -->
  6731. </a>
  6732. <ul class="blockList">
  6733. <li class="blockList">
  6734. <h4>composeRT</h4>
  6735. <pre>public static&nbsp;void&nbsp;composeRT(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  6736. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  6737. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  6738. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  6739. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  6740. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  6741. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1,
  6742. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt1,
  6743. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr2,
  6744. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt2,
  6745. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dr1,
  6746. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dt1,
  6747. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dr2)</pre>
  6748. <div class="block">Combines two rotation-and-shift transformations.</div>
  6749. <dl>
  6750. <dt><span class="paramLabel">Parameters:</span></dt>
  6751. <dd><code>rvec1</code> - First rotation vector.</dd>
  6752. <dd><code>tvec1</code> - First translation vector.</dd>
  6753. <dd><code>rvec2</code> - Second rotation vector.</dd>
  6754. <dd><code>tvec2</code> - Second translation vector.</dd>
  6755. <dd><code>rvec3</code> - Output rotation vector of the superposition.</dd>
  6756. <dd><code>tvec3</code> - Output translation vector of the superposition.</dd>
  6757. <dd><code>dr3dr1</code> - Optional output derivative of rvec3 with regard to rvec1</dd>
  6758. <dd><code>dr3dt1</code> - Optional output derivative of rvec3 with regard to tvec1</dd>
  6759. <dd><code>dr3dr2</code> - Optional output derivative of rvec3 with regard to rvec2</dd>
  6760. <dd><code>dr3dt2</code> - Optional output derivative of rvec3 with regard to tvec2</dd>
  6761. <dd><code>dt3dr1</code> - Optional output derivative of tvec3 with regard to rvec1</dd>
  6762. <dd><code>dt3dt1</code> - Optional output derivative of tvec3 with regard to tvec1</dd>
  6763. <dd><code>dt3dr2</code> - Optional output derivative of tvec3 with regard to rvec2
  6764. The functions compute:
  6765. \(\begin{array}{l} \texttt{rvec3} = \mathrm{rodrigues} ^{-1} \left ( \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \mathrm{rodrigues} ( \texttt{rvec1} ) \right ) \\ \texttt{tvec3} = \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \texttt{tvec1} + \texttt{tvec2} \end{array} ,\)
  6766. where \(\mathrm{rodrigues}\) denotes a rotation vector to a rotation matrix transformation, and
  6767. \(\mathrm{rodrigues}^{-1}\) denotes the inverse transformation. See #Rodrigues for details.
Also, the functions can compute the derivatives of the output vectors with regard to the input
  6769. vectors (see #matMulDeriv ). The functions are used inside #stereoCalibrate but can also be used in
  6770. your own code where Levenberg-Marquardt or another gradient-based solver is used to optimize a
  6771. function that contains a matrix multiplication.</dd>
  6772. </dl>
  6773. </li>
  6774. </ul>
  6775. <a name="composeRT-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  6776. <!-- -->
  6777. </a>
  6778. <ul class="blockList">
  6779. <li class="blockList">
  6780. <h4>composeRT</h4>
  6781. <pre>public static&nbsp;void&nbsp;composeRT(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec1,
  6782. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec1,
  6783. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec2,
  6784. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec2,
  6785. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec3,
  6786. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec3,
  6787. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr1,
  6788. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt1,
  6789. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dr2,
  6790. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dr3dt2,
  6791. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dr1,
  6792. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dt1,
  6793. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dr2,
  6794. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dt3dt2)</pre>
  6795. <div class="block">Combines two rotation-and-shift transformations.</div>
  6796. <dl>
  6797. <dt><span class="paramLabel">Parameters:</span></dt>
  6798. <dd><code>rvec1</code> - First rotation vector.</dd>
  6799. <dd><code>tvec1</code> - First translation vector.</dd>
  6800. <dd><code>rvec2</code> - Second rotation vector.</dd>
  6801. <dd><code>tvec2</code> - Second translation vector.</dd>
  6802. <dd><code>rvec3</code> - Output rotation vector of the superposition.</dd>
  6803. <dd><code>tvec3</code> - Output translation vector of the superposition.</dd>
  6804. <dd><code>dr3dr1</code> - Optional output derivative of rvec3 with regard to rvec1</dd>
  6805. <dd><code>dr3dt1</code> - Optional output derivative of rvec3 with regard to tvec1</dd>
  6806. <dd><code>dr3dr2</code> - Optional output derivative of rvec3 with regard to rvec2</dd>
  6807. <dd><code>dr3dt2</code> - Optional output derivative of rvec3 with regard to tvec2</dd>
  6808. <dd><code>dt3dr1</code> - Optional output derivative of tvec3 with regard to rvec1</dd>
  6809. <dd><code>dt3dt1</code> - Optional output derivative of tvec3 with regard to tvec1</dd>
  6810. <dd><code>dt3dr2</code> - Optional output derivative of tvec3 with regard to rvec2</dd>
  6811. <dd><code>dt3dt2</code> - Optional output derivative of tvec3 with regard to tvec2
  6812. The functions compute:
  6813. \(\begin{array}{l} \texttt{rvec3} = \mathrm{rodrigues} ^{-1} \left ( \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \mathrm{rodrigues} ( \texttt{rvec1} ) \right ) \\ \texttt{tvec3} = \mathrm{rodrigues} ( \texttt{rvec2} ) \cdot \texttt{tvec1} + \texttt{tvec2} \end{array} ,\)
  6814. where \(\mathrm{rodrigues}\) denotes a rotation vector to a rotation matrix transformation, and
  6815. \(\mathrm{rodrigues}^{-1}\) denotes the inverse transformation. See #Rodrigues for details.
Also, the functions can compute the derivatives of the output vectors with regard to the input
  6817. vectors (see #matMulDeriv ). The functions are used inside #stereoCalibrate but can also be used in
  6818. your own code where Levenberg-Marquardt or another gradient-based solver is used to optimize a
  6819. function that contains a matrix multiplication.</dd>
  6820. </dl>
  6821. </li>
  6822. </ul>
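<div class="block">As a small sketch of the composition formula above, the following chains two hypothetical poses; the overloads with additional Mat arguments also fill in the requested Jacobians:</div>
<pre>
// assumes: import org.opencv.core.*; import org.opencv.calib3d.Calib3d; native library loaded
Mat rvec1 = new Mat(3, 1, CvType.CV_64F), tvec1 = new Mat(3, 1, CvType.CV_64F);
Mat rvec2 = new Mat(3, 1, CvType.CV_64F), tvec2 = new Mat(3, 1, CvType.CV_64F);
rvec1.put(0, 0, 0, 0, Math.PI / 2);          // 90 degrees about Z (hypothetical pose 1)
tvec1.put(0, 0, 1, 0, 0);
rvec2.put(0, 0, 0, Math.PI / 2, 0);          // 90 degrees about Y (hypothetical pose 2)
tvec2.put(0, 0, 0, 0, 1);
Mat rvec3 = new Mat(), tvec3 = new Mat();
Mat dr3dr1 = new Mat();                       // optional Jacobian, filled by the 7-argument overload
Calib3d.composeRT(rvec1, tvec1, rvec2, tvec2, rvec3, tvec3, dr3dr1);
// rvec3, tvec3 now describe applying (rvec1, tvec1) first, then (rvec2, tvec2)
</pre>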
  6823. <a name="computeCorrespondEpilines-org.opencv.core.Mat-int-org.opencv.core.Mat-org.opencv.core.Mat-">
  6824. <!-- -->
  6825. </a>
  6826. <ul class="blockList">
  6827. <li class="blockList">
  6828. <h4>computeCorrespondEpilines</h4>
  6829. <pre>public static&nbsp;void&nbsp;computeCorrespondEpilines(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points,
  6830. int&nbsp;whichImage,
  6831. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  6832. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;lines)</pre>
  6833. <div class="block">For points in an image of a stereo pair, computes the corresponding epilines in the other image.</div>
  6834. <dl>
  6835. <dt><span class="paramLabel">Parameters:</span></dt>
  6836. <dd><code>points</code> - Input points. \(N \times 1\) or \(1 \times N\) matrix of type CV_32FC2 or
  6837. vector&lt;Point2f&gt; .</dd>
<dd><code>whichImage</code> - Index of the image (1 or 2) that contains the points.</dd>
  6839. <dd><code>F</code> - Fundamental matrix that can be estimated using #findFundamentalMat or #stereoRectify .</dd>
  6840. <dd><code>lines</code> - Output vector of the epipolar lines corresponding to the points in the other image.
  6841. Each line \(ax + by + c=0\) is encoded by 3 numbers \((a, b, c)\) .
  6842. For every point in one of the two images of a stereo pair, the function finds the equation of the
  6843. corresponding epipolar line in the other image.
  6844. From the fundamental matrix definition (see #findFundamentalMat ), line \(l^{(2)}_i\) in the second
  6845. image for the point \(p^{(1)}_i\) in the first image (when whichImage=1 ) is computed as:
  6846. \(l^{(2)}_i = F p^{(1)}_i\)
  6847. And vice versa, when whichImage=2, \(l^{(1)}_i\) is computed from \(p^{(2)}_i\) as:
  6848. \(l^{(1)}_i = F^T p^{(2)}_i\)
  6849. Line coefficients are defined up to a scale. They are normalized so that \(a_i^2+b_i^2=1\) .</dd>
  6850. </dl>
  6851. </li>
  6852. </ul>
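<div class="block">A minimal sketch, assuming <code>F</code> holds a previously estimated fundamental matrix (the numeric values below are placeholders only):</div>
<pre>
// assumes: import org.opencv.core.*; import org.opencv.calib3d.Calib3d; native library loaded
MatOfPoint2f pts1 = new MatOfPoint2f(new Point(100, 120), new Point(340, 95));
Mat F = new Mat(3, 3, CvType.CV_64F);         // placeholder; normally from Calib3d.findFundamentalMat
F.put(0, 0,
        0, -0.0004, 0.08,
        0.0004, 0, -0.12,
        -0.07, 0.11, 1);
Mat lines2 = new Mat();                       // one normalized (a, b, c) triple per input point
Calib3d.computeCorrespondEpilines(pts1, 1, F, lines2);
double[] abc = lines2.get(0, 0);              // epipolar line a*x + b*y + c = 0 in image 2
</pre>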
  6853. <a name="convertPointsFromHomogeneous-org.opencv.core.Mat-org.opencv.core.Mat-">
  6854. <!-- -->
  6855. </a>
  6856. <ul class="blockList">
  6857. <li class="blockList">
  6858. <h4>convertPointsFromHomogeneous</h4>
  6859. <pre>public static&nbsp;void&nbsp;convertPointsFromHomogeneous(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  6860. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst)</pre>
  6861. <div class="block">Converts points from homogeneous to Euclidean space.</div>
  6862. <dl>
  6863. <dt><span class="paramLabel">Parameters:</span></dt>
  6864. <dd><code>src</code> - Input vector of N-dimensional points.</dd>
  6865. <dd><code>dst</code> - Output vector of N-1-dimensional points.
The function converts points from homogeneous to Euclidean space using perspective projection. That is,
  6867. each point (x1, x2, ... x(n-1), xn) is converted to (x1/xn, x2/xn, ..., x(n-1)/xn). When xn=0, the
  6868. output point coordinates will be (0,0,0,...).</dd>
  6869. </dl>
  6870. </li>
  6871. </ul>
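<div class="block">For example, dividing each homogeneous point by its last coordinate (a small sketch with made-up values):</div>
<pre>
// assumes: import org.opencv.core.*; import org.opencv.calib3d.Calib3d; native library loaded
MatOfPoint3f hom = new MatOfPoint3f(new Point3(4, 2, 2), new Point3(9, 3, 3));
Mat euc = new Mat();
Calib3d.convertPointsFromHomogeneous(hom, euc);
// euc is a 2x1, 2-channel matrix holding (2, 1) and (3, 1)
</pre>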
  6872. <a name="convertPointsToHomogeneous-org.opencv.core.Mat-org.opencv.core.Mat-">
  6873. <!-- -->
  6874. </a>
  6875. <ul class="blockList">
  6876. <li class="blockList">
  6877. <h4>convertPointsToHomogeneous</h4>
  6878. <pre>public static&nbsp;void&nbsp;convertPointsToHomogeneous(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  6879. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst)</pre>
  6880. <div class="block">Converts points from Euclidean to homogeneous space.</div>
  6881. <dl>
  6882. <dt><span class="paramLabel">Parameters:</span></dt>
  6883. <dd><code>src</code> - Input vector of N-dimensional points.</dd>
  6884. <dd><code>dst</code> - Output vector of N+1-dimensional points.
  6885. The function converts points from Euclidean to homogeneous space by appending 1's to the tuple of
  6886. point coordinates. That is, each point (x1, x2, ..., xn) is converted to (x1, x2, ..., xn, 1).</dd>
  6887. </dl>
  6888. </li>
  6889. </ul>
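<div class="block">The inverse direction simply appends a 1 to each point, for example:</div>
<pre>
// assumes: import org.opencv.core.*; import org.opencv.calib3d.Calib3d; native library loaded
MatOfPoint2f pts = new MatOfPoint2f(new Point(2, 1), new Point(3, 1));
Mat hom = new Mat();
Calib3d.convertPointsToHomogeneous(pts, hom);  // (2, 1) -&gt; (2, 1, 1) and (3, 1) -&gt; (3, 1, 1)
</pre>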
  6890. <a name="correctMatches-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  6891. <!-- -->
  6892. </a>
  6893. <ul class="blockList">
  6894. <li class="blockList">
  6895. <h4>correctMatches</h4>
  6896. <pre>public static&nbsp;void&nbsp;correctMatches(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  6897. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  6898. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  6899. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newPoints1,
  6900. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newPoints2)</pre>
  6901. <div class="block">Refines coordinates of corresponding points.</div>
  6902. <dl>
  6903. <dt><span class="paramLabel">Parameters:</span></dt>
  6904. <dd><code>F</code> - 3x3 fundamental matrix.</dd>
  6905. <dd><code>points1</code> - 1xN array containing the first set of points.</dd>
  6906. <dd><code>points2</code> - 1xN array containing the second set of points.</dd>
  6907. <dd><code>newPoints1</code> - The optimized points1.</dd>
  6908. <dd><code>newPoints2</code> - The optimized points2.
  6909. The function implements the Optimal Triangulation Method (see Multiple View Geometry CITE: HartleyZ00 for details).
  6910. For each given point correspondence points1[i] &lt;-&gt; points2[i], and a fundamental matrix F, it
  6911. computes the corrected correspondences newPoints1[i] &lt;-&gt; newPoints2[i] that minimize the geometric
  6912. error \(d(points1[i], newPoints1[i])^2 + d(points2[i],newPoints2[i])^2\) (where \(d(a,b)\) is the
  6913. geometric distance between points \(a\) and \(b\) ) subject to the epipolar constraint
  6914. \(newPoints2^T \cdot F \cdot newPoints1 = 0\) .</dd>
  6915. </dl>
  6916. </li>
  6917. </ul>
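<div class="block">A hedged sketch, assuming <code>F</code> is a valid fundamental matrix for the two views; the point values are placeholders, and the points are laid out as the expected 1xN two-channel arrays:</div>
<pre>
// assumes: import org.opencv.core.*; import org.opencv.calib3d.Calib3d; native library loaded
Mat p1 = new Mat(1, 2, CvType.CV_64FC2);      // 1xN array of (x, y) points in image 1
p1.put(0, 0, 102, 230, 411, 198);
Mat p2 = new Mat(1, 2, CvType.CV_64FC2);      // matching points in image 2
p2.put(0, 0, 97, 228, 405, 201);
Mat F = new Mat(3, 3, CvType.CV_64F);         // placeholder fundamental matrix
F.put(0, 0, 0, -0.0004, 0.08, 0.0004, 0, -0.12, -0.07, 0.11, 1);
Mat np1 = new Mat(), np2 = new Mat();
Calib3d.correctMatches(F, p1, p2, np1, np2);
// np1, np2 hold the corrected correspondences that satisfy the epipolar constraint exactly
</pre>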
  6918. <a name="decomposeEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  6919. <!-- -->
  6920. </a>
  6921. <ul class="blockList">
  6922. <li class="blockList">
  6923. <h4>decomposeEssentialMat</h4>
  6924. <pre>public static&nbsp;void&nbsp;decomposeEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  6925. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  6926. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  6927. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t)</pre>
  6928. <div class="block">Decompose an essential matrix to possible rotations and translation.</div>
  6929. <dl>
  6930. <dt><span class="paramLabel">Parameters:</span></dt>
  6931. <dd><code>E</code> - The input essential matrix.</dd>
  6932. <dd><code>R1</code> - One possible rotation matrix.</dd>
  6933. <dd><code>R2</code> - Another possible rotation matrix.</dd>
  6934. <dd><code>t</code> - One possible translation.
This function decomposes the essential matrix E using singular value decomposition (SVD) CITE: HartleyZ00. In
  6936. general, four possible poses exist for the decomposition of E. They are \([R_1, t]\),
  6937. \([R_1, -t]\), \([R_2, t]\), \([R_2, -t]\).
  6938. If E gives the epipolar constraint \([p_2; 1]^T A^{-T} E A^{-1} [p_1; 1] = 0\) between the image
points \(p_1\) in the first image and \(p_2\) in the second image, then any of the tuples
  6940. \([R_1, t]\), \([R_1, -t]\), \([R_2, t]\), \([R_2, -t]\) is a change of basis from the first
  6941. camera's coordinate system to the second camera's coordinate system. However, by decomposing E, one
  6942. can only get the direction of the translation. For this reason, the translation t is returned with
  6943. unit length.</dd>
  6944. </dl>
  6945. </li>
  6946. </ul>
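<div class="block">A short sketch, assuming <code>E</code> was obtained beforehand (for instance from <code>findEssentialMat</code>); only the structure of the call is shown:</div>
<pre>
// assumes: import org.opencv.core.*; import org.opencv.calib3d.Calib3d; native library loaded
Mat E = Mat.eye(3, 3, CvType.CV_64F);         // placeholder; use a real essential matrix in practice
Mat R1 = new Mat(), R2 = new Mat(), t = new Mat();
Calib3d.decomposeEssentialMat(E, R1, R2, t);
// candidate poses: [R1, t], [R1, -t], [R2, t], [R2, -t]; t has unit length, and a
// positive-depth (cheirality) check is needed to select the physically valid one
</pre>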
  6947. <a name="decomposeHomographyMat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-java.util.List-">
  6948. <!-- -->
  6949. </a>
  6950. <ul class="blockList">
  6951. <li class="blockList">
  6952. <h4>decomposeHomographyMat</h4>
  6953. <pre>public static&nbsp;int&nbsp;decomposeHomographyMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;H,
  6954. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  6955. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rotations,
  6956. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;translations,
  6957. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;normals)</pre>
  6958. <div class="block">Decompose a homography matrix to rotation(s), translation(s) and plane normal(s).</div>
  6959. <dl>
  6960. <dt><span class="paramLabel">Parameters:</span></dt>
  6961. <dd><code>H</code> - The input homography matrix between two images.</dd>
  6962. <dd><code>K</code> - The input camera intrinsic matrix.</dd>
  6963. <dd><code>rotations</code> - Array of rotation matrices.</dd>
  6964. <dd><code>translations</code> - Array of translation matrices.</dd>
  6965. <dd><code>normals</code> - Array of plane normal matrices.
  6966. This function extracts relative camera motion between two views of a planar object and returns up to
  6967. four mathematical solution tuples of rotation, translation, and plane normal. The decomposition of
  6968. the homography matrix H is described in detail in CITE: Malis2007.
  6969. If the homography H, induced by the plane, gives the constraint
  6970. \(s_i \vecthree{x'_i}{y'_i}{1} \sim H \vecthree{x_i}{y_i}{1}\) on the source image points
  6971. \(p_i\) and the destination image points \(p'_i\), then the tuple of rotations[k] and
  6972. translations[k] is a change of basis from the source camera's coordinate system to the destination
  6973. camera's coordinate system. However, by decomposing H, one can only get the translation normalized
  6974. by the (typically unknown) depth of the scene, i.e. its direction but with normalized length.
If point correspondences are available, at least two solutions may further be invalidated by
applying the positive depth constraint, i.e. all points must be in front of the camera.</dd>
  6977. <dt><span class="returnLabel">Returns:</span></dt>
  6978. <dd>automatically generated</dd>
  6979. </dl>
  6980. </li>
  6981. </ul>
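<div class="block">A hedged sketch of collecting the candidate decompositions, assuming <code>H</code> and <code>K</code> were estimated elsewhere (e.g. via <code>findHomography</code> and a prior calibration); identity matrices stand in below:</div>
<pre>
// assumes: import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
// import java.util.ArrayList; import java.util.List; native library loaded
Mat H = Mat.eye(3, 3, CvType.CV_64F);         // placeholder homography
Mat K = Mat.eye(3, 3, CvType.CV_64F);         // placeholder camera intrinsic matrix
List&lt;Mat&gt; rotations = new ArrayList&lt;&gt;();
List&lt;Mat&gt; translations = new ArrayList&lt;&gt;();
List&lt;Mat&gt; normals = new ArrayList&lt;&gt;();
int n = Calib3d.decomposeHomographyMat(H, K, rotations, translations, normals);
// up to four candidate (R, t, normal) tuples; prune them with the positive depth constraint
</pre>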
  6982. <a name="decomposeProjectionMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  6983. <!-- -->
  6984. </a>
  6985. <ul class="blockList">
  6986. <li class="blockList">
  6987. <h4>decomposeProjectionMatrix</h4>
  6988. <pre>public static&nbsp;void&nbsp;decomposeProjectionMatrix(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projMatrix,
  6989. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  6990. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrix,
  6991. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;transVect)</pre>
  6992. <div class="block">Decomposes a projection matrix into a rotation matrix and a camera intrinsic matrix.</div>
  6993. <dl>
  6994. <dt><span class="paramLabel">Parameters:</span></dt>
  6995. <dd><code>projMatrix</code> - 3x4 input projection matrix P.</dd>
  6996. <dd><code>cameraMatrix</code> - Output 3x3 camera intrinsic matrix \(\cameramatrix{A}\).</dd>
  6997. <dd><code>rotMatrix</code> - Output 3x3 external rotation matrix R.</dd>
<dd><code>transVect</code> - Output 4x1 translation vector T.
  7000. The function computes a decomposition of a projection matrix into a calibration and a rotation
  7001. matrix and the position of a camera.
  7002. It optionally returns three rotation matrices, one for each axis, and three Euler angles that could
  7003. be used in OpenGL. Note, there is always more than one sequence of rotations about the three
  7004. principal axes that results in the same orientation of an object, e.g. see CITE: Slabaugh . Returned
three rotation matrices and corresponding three Euler angles are only one of the possible solutions.
  7006. The function is based on #RQDecomp3x3 .</dd>
  7007. </dl>
  7008. </li>
  7009. </ul>
  7010. <a name="decomposeProjectionMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  7011. <!-- -->
  7012. </a>
  7013. <ul class="blockList">
  7014. <li class="blockList">
  7015. <h4>decomposeProjectionMatrix</h4>
  7016. <pre>public static&nbsp;void&nbsp;decomposeProjectionMatrix(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projMatrix,
  7017. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  7018. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrix,
  7019. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;transVect,
  7020. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixX)</pre>
  7021. <div class="block">Decomposes a projection matrix into a rotation matrix and a camera intrinsic matrix.</div>
  7022. <dl>
  7023. <dt><span class="paramLabel">Parameters:</span></dt>
  7024. <dd><code>projMatrix</code> - 3x4 input projection matrix P.</dd>
  7025. <dd><code>cameraMatrix</code> - Output 3x3 camera intrinsic matrix \(\cameramatrix{A}\).</dd>
  7026. <dd><code>rotMatrix</code> - Output 3x3 external rotation matrix R.</dd>
  7027. <dd><code>transVect</code> - Output 4x1 translation vector T.</dd>
<dd><code>rotMatrixX</code> - Optional 3x3 rotation matrix around x-axis.
  7030. The function computes a decomposition of a projection matrix into a calibration and a rotation
  7031. matrix and the position of a camera.
  7032. It optionally returns three rotation matrices, one for each axis, and three Euler angles that could
  7033. be used in OpenGL. Note, there is always more than one sequence of rotations about the three
  7034. principal axes that results in the same orientation of an object, e.g. see CITE: Slabaugh . Returned
three rotation matrices and corresponding three Euler angles are only one of the possible solutions.
  7036. The function is based on #RQDecomp3x3 .</dd>
  7037. </dl>
  7038. </li>
  7039. </ul>
  7040. <a name="decomposeProjectionMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  7041. <!-- -->
  7042. </a>
  7043. <ul class="blockList">
  7044. <li class="blockList">
  7045. <h4>decomposeProjectionMatrix</h4>
  7046. <pre>public static&nbsp;void&nbsp;decomposeProjectionMatrix(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projMatrix,
  7047. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  7048. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrix,
  7049. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;transVect,
  7050. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixX,
  7051. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixY)</pre>
  7052. <div class="block">Decomposes a projection matrix into a rotation matrix and a camera intrinsic matrix.</div>
  7053. <dl>
  7054. <dt><span class="paramLabel">Parameters:</span></dt>
  7055. <dd><code>projMatrix</code> - 3x4 input projection matrix P.</dd>
  7056. <dd><code>cameraMatrix</code> - Output 3x3 camera intrinsic matrix \(\cameramatrix{A}\).</dd>
  7057. <dd><code>rotMatrix</code> - Output 3x3 external rotation matrix R.</dd>
  7058. <dd><code>transVect</code> - Output 4x1 translation vector T.</dd>
  7059. <dd><code>rotMatrixX</code> - Optional 3x3 rotation matrix around x-axis.</dd>
<dd><code>rotMatrixY</code> - Optional 3x3 rotation matrix around y-axis.
  7062. The function computes a decomposition of a projection matrix into a calibration and a rotation
  7063. matrix and the position of a camera.
  7064. It optionally returns three rotation matrices, one for each axis, and three Euler angles that could
  7065. be used in OpenGL. Note, there is always more than one sequence of rotations about the three
  7066. principal axes that results in the same orientation of an object, e.g. see CITE: Slabaugh . Returned
three rotation matrices and corresponding three Euler angles are only one of the possible solutions.
  7068. The function is based on #RQDecomp3x3 .</dd>
  7069. </dl>
  7070. </li>
  7071. </ul>
  7072. <a name="decomposeProjectionMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  7073. <!-- -->
  7074. </a>
  7075. <ul class="blockList">
  7076. <li class="blockList">
  7077. <h4>decomposeProjectionMatrix</h4>
  7078. <pre>public static&nbsp;void&nbsp;decomposeProjectionMatrix(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projMatrix,
  7079. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  7080. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrix,
  7081. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;transVect,
  7082. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixX,
  7083. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixY,
  7084. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixZ)</pre>
  7085. <div class="block">Decomposes a projection matrix into a rotation matrix and a camera intrinsic matrix.</div>
  7086. <dl>
  7087. <dt><span class="paramLabel">Parameters:</span></dt>
  7088. <dd><code>projMatrix</code> - 3x4 input projection matrix P.</dd>
  7089. <dd><code>cameraMatrix</code> - Output 3x3 camera intrinsic matrix \(\cameramatrix{A}\).</dd>
  7090. <dd><code>rotMatrix</code> - Output 3x3 external rotation matrix R.</dd>
  7091. <dd><code>transVect</code> - Output 4x1 translation vector T.</dd>
  7092. <dd><code>rotMatrixX</code> - Optional 3x3 rotation matrix around x-axis.</dd>
  7093. <dd><code>rotMatrixY</code> - Optional 3x3 rotation matrix around y-axis.</dd>
<dd><code>rotMatrixZ</code> - Optional 3x3 rotation matrix around z-axis.
  7096. The function computes a decomposition of a projection matrix into a calibration and a rotation
  7097. matrix and the position of a camera.
  7098. It optionally returns three rotation matrices, one for each axis, and three Euler angles that could
  7099. be used in OpenGL. Note, there is always more than one sequence of rotations about the three
  7100. principal axes that results in the same orientation of an object, e.g. see CITE: Slabaugh . Returned
three rotation matrices and corresponding three Euler angles are only one of the possible solutions.
  7102. The function is based on #RQDecomp3x3 .</dd>
  7103. </dl>
  7104. </li>
  7105. </ul>
  7106. <a name="decomposeProjectionMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  7107. <!-- -->
  7108. </a>
  7109. <ul class="blockList">
  7110. <li class="blockList">
  7111. <h4>decomposeProjectionMatrix</h4>
  7112. <pre>public static&nbsp;void&nbsp;decomposeProjectionMatrix(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projMatrix,
  7113. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  7114. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrix,
  7115. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;transVect,
  7116. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixX,
  7117. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixY,
  7118. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rotMatrixZ,
  7119. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;eulerAngles)</pre>
  7120. <div class="block">Decomposes a projection matrix into a rotation matrix and a camera intrinsic matrix.</div>
  7121. <dl>
  7122. <dt><span class="paramLabel">Parameters:</span></dt>
  7123. <dd><code>projMatrix</code> - 3x4 input projection matrix P.</dd>
  7124. <dd><code>cameraMatrix</code> - Output 3x3 camera intrinsic matrix \(\cameramatrix{A}\).</dd>
  7125. <dd><code>rotMatrix</code> - Output 3x3 external rotation matrix R.</dd>
  7126. <dd><code>transVect</code> - Output 4x1 translation vector T.</dd>
  7127. <dd><code>rotMatrixX</code> - Optional 3x3 rotation matrix around x-axis.</dd>
  7128. <dd><code>rotMatrixY</code> - Optional 3x3 rotation matrix around y-axis.</dd>
  7129. <dd><code>rotMatrixZ</code> - Optional 3x3 rotation matrix around z-axis.</dd>
  7130. <dd><code>eulerAngles</code> - Optional three-element vector containing three Euler angles of rotation in
  7131. degrees.
  7132. The function computes a decomposition of a projection matrix into a calibration and a rotation
  7133. matrix and the position of a camera.
  7134. It optionally returns three rotation matrices, one for each axis, and three Euler angles that could
  7135. be used in OpenGL. Note, there is always more than one sequence of rotations about the three
  7136. principal axes that results in the same orientation of an object, e.g. see CITE: Slabaugh . Returned
three rotation matrices and corresponding three Euler angles are only one of the possible solutions.
  7138. The function is based on #RQDecomp3x3 .</dd>
  7139. </dl>
  7140. </li>
  7141. </ul>
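<div class="block">A small sketch with a synthetic projection matrix P = K [I | t], so the recovered intrinsics and rotation are easy to verify (all values are illustrative only):</div>
<pre>
// assumes: import org.opencv.core.*; import org.opencv.calib3d.Calib3d; native library loaded
Mat P = new Mat(3, 4, CvType.CV_64F);
P.put(0, 0,
        800, 0, 320, -80,
        0, 800, 240, 0,
        0, 0, 1, 0);                          // synthetic P with R = I and camera center at x = 0.1
Mat K = new Mat(), R = new Mat(), t = new Mat();
Calib3d.decomposeProjectionMatrix(P, K, R, t);
// t is a homogeneous 4x1 vector; the camera position is t(0..2) / t(3)
double camX = t.get(0, 0)[0] / t.get(3, 0)[0];
</pre>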
  7142. <a name="drawChessboardCorners-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.MatOfPoint2f-boolean-">
  7143. <!-- -->
  7144. </a>
  7145. <ul class="blockList">
  7146. <li class="blockList">
  7147. <h4>drawChessboardCorners</h4>
  7148. <pre>public static&nbsp;void&nbsp;drawChessboardCorners(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
  7149. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
  7150. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;corners,
  7151. boolean&nbsp;patternWasFound)</pre>
  7152. <div class="block">Renders the detected chessboard corners.</div>
  7153. <dl>
  7154. <dt><span class="paramLabel">Parameters:</span></dt>
  7155. <dd><code>image</code> - Destination image. It must be an 8-bit color image.</dd>
<dd><code>patternSize</code> - Number of inner corners per chessboard row and column
  7157. (patternSize = cv::Size(points_per_row,points_per_column)).</dd>
  7158. <dd><code>corners</code> - Array of detected corners, the output of #findChessboardCorners.</dd>
  7159. <dd><code>patternWasFound</code> - Parameter indicating whether the complete board was found or not. The
  7160. return value of #findChessboardCorners should be passed here.
  7161. The function draws individual chessboard corners detected either as red circles if the board was not
  7162. found, or as colored corners connected with lines if the board was found.</dd>
  7163. </dl>
  7164. </li>
  7165. </ul>
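<div class="block">A typical detect-and-draw sketch; the file name and the 9x6 inner-corner pattern size are assumptions for illustration:</div>
<pre>
// assumes: import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
// import org.opencv.imgcodecs.Imgcodecs; native library loaded
Mat gray = Imgcodecs.imread("board.jpg", Imgcodecs.IMREAD_GRAYSCALE);
Mat color = Imgcodecs.imread("board.jpg");    // 8-bit color image to draw on
Size patternSize = new Size(9, 6);            // inner corners per row and column (assumed)
MatOfPoint2f corners = new MatOfPoint2f();
boolean found = Calib3d.findChessboardCorners(gray, patternSize, corners);
Calib3d.drawChessboardCorners(color, patternSize, corners, found);
Imgcodecs.imwrite("board_corners.jpg", color);
</pre>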
  7166. <a name="drawFrameAxes-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-float-">
  7167. <!-- -->
  7168. </a>
  7169. <ul class="blockList">
  7170. <li class="blockList">
  7171. <h4>drawFrameAxes</h4>
  7172. <pre>public static&nbsp;void&nbsp;drawFrameAxes(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
  7173. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  7174. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  7175. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  7176. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  7177. float&nbsp;length)</pre>
  7178. <div class="block">Draw axes of the world/object coordinate system from pose estimation. SEE: solvePnP</div>
  7179. <dl>
  7180. <dt><span class="paramLabel">Parameters:</span></dt>
  7181. <dd><code>image</code> - Input/output image. It must have 1 or 3 channels. The number of channels is not altered.</dd>
  7182. <dd><code>cameraMatrix</code> - Input 3x3 floating-point matrix of camera intrinsic parameters.
  7183. \(\cameramatrix{A}\)</dd>
  7184. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  7185. \(\distcoeffs\). If the vector is empty, the zero distortion coefficients are assumed.</dd>
  7186. <dd><code>rvec</code> - Rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  7187. the model coordinate system to the camera coordinate system.</dd>
  7188. <dd><code>tvec</code> - Translation vector.</dd>
<dd><code>length</code> - Length of the painted axes in the same unit as tvec (usually in meters).
This function draws the axes of the world/object coordinate system w.r.t. the camera frame.
  7191. OX is drawn in red, OY in green and OZ in blue.</dd>
  7192. </dl>
  7193. </li>
  7194. </ul>
  7195. <a name="drawFrameAxes-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-float-int-">
  7196. <!-- -->
  7197. </a>
  7198. <ul class="blockList">
  7199. <li class="blockList">
  7200. <h4>drawFrameAxes</h4>
  7201. <pre>public static&nbsp;void&nbsp;drawFrameAxes(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
  7202. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  7203. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  7204. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  7205. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  7206. float&nbsp;length,
  7207. int&nbsp;thickness)</pre>
  7208. <div class="block">Draw axes of the world/object coordinate system from pose estimation. SEE: solvePnP</div>
  7209. <dl>
  7210. <dt><span class="paramLabel">Parameters:</span></dt>
  7211. <dd><code>image</code> - Input/output image. It must have 1 or 3 channels. The number of channels is not altered.</dd>
  7212. <dd><code>cameraMatrix</code> - Input 3x3 floating-point matrix of camera intrinsic parameters.
  7213. \(\cameramatrix{A}\)</dd>
  7214. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  7215. \(\distcoeffs\). If the vector is empty, the zero distortion coefficients are assumed.</dd>
  7216. <dd><code>rvec</code> - Rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  7217. the model coordinate system to the camera coordinate system.</dd>
  7218. <dd><code>tvec</code> - Translation vector.</dd>
<dd><code>length</code> - Length of the painted axes, in the same unit as tvec (usually meters).</dd>
<dd><code>thickness</code> - Line thickness of the painted axes.
This function draws the axes of the world/object coordinate system w.r.t. the camera frame.
OX is drawn in red, OY in green and OZ in blue.</dd>
  7223. </dl>
  7224. </li>
  7225. </ul>
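<div class="block"><b>Usage sketch (not part of the generated reference):</b> the snippet below estimates a pose with
solvePnP and then overlays the axes with drawFrameAxes. The intrinsics, the object points and the &quot;detected&quot;
image points are made-up values used only for illustration.</div>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;

public class DrawFrameAxesExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Hypothetical pinhole intrinsics (fx = fy = 800, principal point at 320x240).
        Mat cameraMatrix = new Mat(3, 3, CvType.CV_64F);
        cameraMatrix.put(0, 0, 800, 0, 320, 0, 800, 240, 0, 0, 1);
        MatOfDouble distCoeffs = new MatOfDouble(); // empty: zero distortion assumed

        // A 10 cm square marker in its own coordinate system (units: meters).
        MatOfPoint3f objectPoints = new MatOfPoint3f(
                new Point3(0, 0, 0), new Point3(0.1, 0, 0),
                new Point3(0.1, 0.1, 0), new Point3(0, 0.1, 0));
        // Hypothetical detections of the same corners in the image.
        MatOfPoint2f imagePoints = new MatOfPoint2f(
                new Point(300, 220), new Point(380, 222),
                new Point(378, 300), new Point(298, 298));

        Mat rvec = new Mat(), tvec = new Mat();
        Mat image = Mat.zeros(480, 640, CvType.CV_8UC3); // stand-in for a camera frame
        if (Calib3d.solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec)) {
            // Draw 5 cm long axes (same unit as tvec), 2 px thick: OX red, OY green, OZ blue.
            Calib3d.drawFrameAxes(image, cameraMatrix, distCoeffs, rvec, tvec, 0.05f, 2);
        }
    }
}
</pre>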
  7226. <a name="estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-">
  7227. <!-- -->
  7228. </a>
  7229. <ul class="blockList">
  7230. <li class="blockList">
  7231. <h4>estimateAffine2D</h4>
  7232. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffine2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
  7233. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to)</pre>
  7234. <div class="block">Computes an optimal affine transformation between two 2D point sets.
  7235. It computes
  7236. \(
  7237. \begin{bmatrix}
  7238. x\\
  7239. y\\
  7240. \end{bmatrix}
  7241. =
  7242. \begin{bmatrix}
  7243. a_{11} &amp; a_{12}\\
  7244. a_{21} &amp; a_{22}\\
  7245. \end{bmatrix}
  7246. \begin{bmatrix}
  7247. X\\
  7248. Y\\
  7249. \end{bmatrix}
  7250. +
  7251. \begin{bmatrix}
  7252. b_1\\
  7253. b_2\\
  7254. \end{bmatrix}
  7255. \)</div>
  7256. <dl>
  7257. <dt><span class="paramLabel">Parameters:</span></dt>
  7258. <dd><code>from</code> - First input 2D point set containing \((X,Y)\).</dd>
  7259. <dd><code>to</code> - Second input 2D point set containing \((x,y)\).
  7260. <ul>
  7261. <li>
  7262. REF: RANSAC - RANSAC-based robust method
  7263. </li>
  7264. <li>
  7265. REF: LMEDS - Least-Median robust method
  7266. RANSAC is the default method.
  7267. </li>
  7268. </ul>
The remaining optional parameters keep their default values. The reprojection threshold, which decides whether a
point is counted as an inlier, applies only to RANSAC. A confidence level between 0.95 and 0.99 is usually good
enough; values too close to 1 can slow down the estimation significantly, and values lower than 0.8-0.9 can result
in an incorrectly estimated transformation. Passing 0 refining iterations disables refining, so the output matrix is
then the output of the robust method.</dd>
  7273. <dt><span class="returnLabel">Returns:</span></dt>
  7274. <dd>Output 2D affine transformation matrix \(2 \times 3\) or empty matrix if transformation
  7275. could not be estimated. The returned matrix has the following form:
  7276. \(
  7277. \begin{bmatrix}
  7278. a_{11} &amp; a_{12} &amp; b_1\\
  7279. a_{21} &amp; a_{22} &amp; b_2\\
  7280. \end{bmatrix}
  7281. \)
  7282. The function estimates an optimal 2D affine transformation between two 2D point sets using the
  7283. selected robust algorithm.
  7284. The computed transformation is then refined further (using only inliers) with the
  7285. Levenberg-Marquardt method to reduce the re-projection error even more.
  7286. <b>Note:</b>
  7287. The RANSAC method can handle practically any ratio of outliers but needs a threshold to
  7288. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  7289. correctly only when there are more than 50% of inliers.
  7290. SEE: estimateAffinePartial2D, getAffineTransform</dd>
  7291. </dl>
  7292. </li>
  7293. </ul>
  7294. <a name="estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  7295. <!-- -->
  7296. </a>
  7297. <ul class="blockList">
  7298. <li class="blockList">
  7299. <h4>estimateAffine2D</h4>
  7300. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffine2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
  7301. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
  7302. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers)</pre>
  7303. <div class="block">Computes an optimal affine transformation between two 2D point sets.
  7304. It computes
  7305. \(
  7306. \begin{bmatrix}
  7307. x\\
  7308. y\\
  7309. \end{bmatrix}
  7310. =
  7311. \begin{bmatrix}
  7312. a_{11} &amp; a_{12}\\
  7313. a_{21} &amp; a_{22}\\
  7314. \end{bmatrix}
  7315. \begin{bmatrix}
  7316. X\\
  7317. Y\\
  7318. \end{bmatrix}
  7319. +
  7320. \begin{bmatrix}
  7321. b_1\\
  7322. b_2\\
  7323. \end{bmatrix}
  7324. \)</div>
  7325. <dl>
  7326. <dt><span class="paramLabel">Parameters:</span></dt>
  7327. <dd><code>from</code> - First input 2D point set containing \((X,Y)\).</dd>
  7328. <dd><code>to</code> - Second input 2D point set containing \((x,y)\).</dd>
  7329. <dd><code>inliers</code> - Output vector indicating which points are inliers (1-inlier, 0-outlier).
  7330. <ul>
  7331. <li>
  7332. REF: RANSAC - RANSAC-based robust method
  7333. </li>
  7334. <li>
  7335. REF: LMEDS - Least-Median robust method
  7336. RANSAC is the default method.
  7337. </li>
  7338. </ul>
The remaining optional parameters keep their default values. The reprojection threshold, which decides whether a
point is counted as an inlier, applies only to RANSAC. A confidence level between 0.95 and 0.99 is usually good
enough; values too close to 1 can slow down the estimation significantly, and values lower than 0.8-0.9 can result
in an incorrectly estimated transformation. Passing 0 refining iterations disables refining, so the output matrix is
then the output of the robust method.</dd>
  7343. <dt><span class="returnLabel">Returns:</span></dt>
  7344. <dd>Output 2D affine transformation matrix \(2 \times 3\) or empty matrix if transformation
  7345. could not be estimated. The returned matrix has the following form:
  7346. \(
  7347. \begin{bmatrix}
  7348. a_{11} &amp; a_{12} &amp; b_1\\
  7349. a_{21} &amp; a_{22} &amp; b_2\\
  7350. \end{bmatrix}
  7351. \)
  7352. The function estimates an optimal 2D affine transformation between two 2D point sets using the
  7353. selected robust algorithm.
  7354. The computed transformation is then refined further (using only inliers) with the
  7355. Levenberg-Marquardt method to reduce the re-projection error even more.
  7356. <b>Note:</b>
  7357. The RANSAC method can handle practically any ratio of outliers but needs a threshold to
  7358. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  7359. correctly only when there are more than 50% of inliers.
  7360. SEE: estimateAffinePartial2D, getAffineTransform</dd>
  7361. </dl>
  7362. </li>
  7363. </ul>
  7364. <a name="estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">
  7365. <!-- -->
  7366. </a>
  7367. <ul class="blockList">
  7368. <li class="blockList">
  7369. <h4>estimateAffine2D</h4>
  7370. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffine2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
  7371. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
  7372. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  7373. int&nbsp;method)</pre>
  7374. <div class="block">Computes an optimal affine transformation between two 2D point sets.
  7375. It computes
  7376. \(
  7377. \begin{bmatrix}
  7378. x\\
  7379. y\\
  7380. \end{bmatrix}
  7381. =
  7382. \begin{bmatrix}
  7383. a_{11} &amp; a_{12}\\
  7384. a_{21} &amp; a_{22}\\
  7385. \end{bmatrix}
  7386. \begin{bmatrix}
  7387. X\\
  7388. Y\\
  7389. \end{bmatrix}
  7390. +
  7391. \begin{bmatrix}
  7392. b_1\\
  7393. b_2\\
  7394. \end{bmatrix}
  7395. \)</div>
  7396. <dl>
  7397. <dt><span class="paramLabel">Parameters:</span></dt>
  7398. <dd><code>from</code> - First input 2D point set containing \((X,Y)\).</dd>
  7399. <dd><code>to</code> - Second input 2D point set containing \((x,y)\).</dd>
  7400. <dd><code>inliers</code> - Output vector indicating which points are inliers (1-inlier, 0-outlier).</dd>
  7401. <dd><code>method</code> - Robust method used to compute transformation. The following methods are possible:
  7402. <ul>
  7403. <li>
  7404. REF: RANSAC - RANSAC-based robust method
  7405. </li>
  7406. <li>
  7407. REF: LMEDS - Least-Median robust method
  7408. RANSAC is the default method.
  7409. </li>
  7410. </ul>
The remaining optional parameters keep their default values. The reprojection threshold, which decides whether a
point is counted as an inlier, applies only to RANSAC. A confidence level between 0.95 and 0.99 is usually good
enough; values too close to 1 can slow down the estimation significantly, and values lower than 0.8-0.9 can result
in an incorrectly estimated transformation. Passing 0 refining iterations disables refining, so the output matrix is
then the output of the robust method.</dd>
  7415. <dt><span class="returnLabel">Returns:</span></dt>
  7416. <dd>Output 2D affine transformation matrix \(2 \times 3\) or empty matrix if transformation
  7417. could not be estimated. The returned matrix has the following form:
  7418. \(
  7419. \begin{bmatrix}
  7420. a_{11} &amp; a_{12} &amp; b_1\\
  7421. a_{21} &amp; a_{22} &amp; b_2\\
  7422. \end{bmatrix}
  7423. \)
  7424. The function estimates an optimal 2D affine transformation between two 2D point sets using the
  7425. selected robust algorithm.
  7426. The computed transformation is then refined further (using only inliers) with the
  7427. Levenberg-Marquardt method to reduce the re-projection error even more.
  7428. <b>Note:</b>
  7429. The RANSAC method can handle practically any ratio of outliers but needs a threshold to
  7430. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  7431. correctly only when there are more than 50% of inliers.
  7432. SEE: estimateAffinePartial2D, getAffineTransform</dd>
  7433. </dl>
  7434. </li>
  7435. </ul>
  7436. <a name="estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-">
  7437. <!-- -->
  7438. </a>
  7439. <ul class="blockList">
  7440. <li class="blockList">
  7441. <h4>estimateAffine2D</h4>
  7442. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffine2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
  7443. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
  7444. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  7445. int&nbsp;method,
  7446. double&nbsp;ransacReprojThreshold)</pre>
  7447. <div class="block">Computes an optimal affine transformation between two 2D point sets.
  7448. It computes
  7449. \(
  7450. \begin{bmatrix}
  7451. x\\
  7452. y\\
  7453. \end{bmatrix}
  7454. =
  7455. \begin{bmatrix}
  7456. a_{11} &amp; a_{12}\\
  7457. a_{21} &amp; a_{22}\\
  7458. \end{bmatrix}
  7459. \begin{bmatrix}
  7460. X\\
  7461. Y\\
  7462. \end{bmatrix}
  7463. +
  7464. \begin{bmatrix}
  7465. b_1\\
  7466. b_2\\
  7467. \end{bmatrix}
  7468. \)</div>
  7469. <dl>
  7470. <dt><span class="paramLabel">Parameters:</span></dt>
  7471. <dd><code>from</code> - First input 2D point set containing \((X,Y)\).</dd>
  7472. <dd><code>to</code> - Second input 2D point set containing \((x,y)\).</dd>
  7473. <dd><code>inliers</code> - Output vector indicating which points are inliers (1-inlier, 0-outlier).</dd>
  7474. <dd><code>method</code> - Robust method used to compute transformation. The following methods are possible:
  7475. <ul>
  7476. <li>
  7477. REF: RANSAC - RANSAC-based robust method
  7478. </li>
  7479. <li>
  7480. REF: LMEDS - Least-Median robust method
  7481. RANSAC is the default method.
  7482. </li>
  7483. </ul></dd>
  7484. <dd><code>ransacReprojThreshold</code> - Maximum reprojection error in the RANSAC algorithm to consider
  7485. a point as an inlier. Applies only to RANSAC.
The remaining optional parameters keep their default values. A confidence level between 0.95 and 0.99 is usually good
enough; values too close to 1 can slow down the estimation significantly, and values lower than 0.8-0.9 can result in
an incorrectly estimated transformation. Passing 0 refining iterations disables refining, so the output matrix is then
the output of the robust method.</dd>
  7489. <dt><span class="returnLabel">Returns:</span></dt>
  7490. <dd>Output 2D affine transformation matrix \(2 \times 3\) or empty matrix if transformation
  7491. could not be estimated. The returned matrix has the following form:
  7492. \(
  7493. \begin{bmatrix}
  7494. a_{11} &amp; a_{12} &amp; b_1\\
  7495. a_{21} &amp; a_{22} &amp; b_2\\
  7496. \end{bmatrix}
  7497. \)
  7498. The function estimates an optimal 2D affine transformation between two 2D point sets using the
  7499. selected robust algorithm.
  7500. The computed transformation is then refined further (using only inliers) with the
  7501. Levenberg-Marquardt method to reduce the re-projection error even more.
  7502. <b>Note:</b>
  7503. The RANSAC method can handle practically any ratio of outliers but needs a threshold to
  7504. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  7505. correctly only when there are more than 50% of inliers.
  7506. SEE: estimateAffinePartial2D, getAffineTransform</dd>
  7507. </dl>
  7508. </li>
  7509. </ul>
  7510. <a name="estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-long-">
  7511. <!-- -->
  7512. </a>
  7513. <ul class="blockList">
  7514. <li class="blockList">
  7515. <h4>estimateAffine2D</h4>
  7516. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffine2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
  7517. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
  7518. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  7519. int&nbsp;method,
  7520. double&nbsp;ransacReprojThreshold,
  7521. long&nbsp;maxIters)</pre>
  7522. <div class="block">Computes an optimal affine transformation between two 2D point sets.
  7523. It computes
  7524. \(
  7525. \begin{bmatrix}
  7526. x\\
  7527. y\\
  7528. \end{bmatrix}
  7529. =
  7530. \begin{bmatrix}
  7531. a_{11} &amp; a_{12}\\
  7532. a_{21} &amp; a_{22}\\
  7533. \end{bmatrix}
  7534. \begin{bmatrix}
  7535. X\\
  7536. Y\\
  7537. \end{bmatrix}
  7538. +
  7539. \begin{bmatrix}
  7540. b_1\\
  7541. b_2\\
  7542. \end{bmatrix}
  7543. \)</div>
  7544. <dl>
  7545. <dt><span class="paramLabel">Parameters:</span></dt>
  7546. <dd><code>from</code> - First input 2D point set containing \((X,Y)\).</dd>
  7547. <dd><code>to</code> - Second input 2D point set containing \((x,y)\).</dd>
  7548. <dd><code>inliers</code> - Output vector indicating which points are inliers (1-inlier, 0-outlier).</dd>
  7549. <dd><code>method</code> - Robust method used to compute transformation. The following methods are possible:
  7550. <ul>
  7551. <li>
  7552. REF: RANSAC - RANSAC-based robust method
  7553. </li>
  7554. <li>
  7555. REF: LMEDS - Least-Median robust method
  7556. RANSAC is the default method.
  7557. </li>
  7558. </ul></dd>
  7559. <dd><code>ransacReprojThreshold</code> - Maximum reprojection error in the RANSAC algorithm to consider
  7560. a point as an inlier. Applies only to RANSAC.</dd>
  7561. <dd><code>maxIters</code> - The maximum number of robust method iterations.
The confidence level and the number of refining iterations keep their default values: a confidence between 0.95 and
0.99 is usually good enough, values too close to 1 can slow down the estimation significantly, and values lower than
0.8-0.9 can result in an incorrectly estimated transformation. Passing 0 refining iterations disables refining, so the
output matrix is then the output of the robust method.</dd>
  7565. <dt><span class="returnLabel">Returns:</span></dt>
  7566. <dd>Output 2D affine transformation matrix \(2 \times 3\) or empty matrix if transformation
  7567. could not be estimated. The returned matrix has the following form:
  7568. \(
  7569. \begin{bmatrix}
  7570. a_{11} &amp; a_{12} &amp; b_1\\
  7571. a_{21} &amp; a_{22} &amp; b_2\\
  7572. \end{bmatrix}
  7573. \)
  7574. The function estimates an optimal 2D affine transformation between two 2D point sets using the
  7575. selected robust algorithm.
  7576. The computed transformation is then refined further (using only inliers) with the
  7577. Levenberg-Marquardt method to reduce the re-projection error even more.
  7578. <b>Note:</b>
  7579. The RANSAC method can handle practically any ratio of outliers but needs a threshold to
  7580. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  7581. correctly only when there are more than 50% of inliers.
  7582. SEE: estimateAffinePartial2D, getAffineTransform</dd>
  7583. </dl>
  7584. </li>
  7585. </ul>
  7586. <a name="estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-long-double-">
  7587. <!-- -->
  7588. </a>
  7589. <ul class="blockList">
  7590. <li class="blockList">
  7591. <h4>estimateAffine2D</h4>
  7592. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffine2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
  7593. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
  7594. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  7595. int&nbsp;method,
  7596. double&nbsp;ransacReprojThreshold,
  7597. long&nbsp;maxIters,
  7598. double&nbsp;confidence)</pre>
  7599. <div class="block">Computes an optimal affine transformation between two 2D point sets.
  7600. It computes
  7601. \(
  7602. \begin{bmatrix}
  7603. x\\
  7604. y\\
  7605. \end{bmatrix}
  7606. =
  7607. \begin{bmatrix}
  7608. a_{11} &amp; a_{12}\\
  7609. a_{21} &amp; a_{22}\\
  7610. \end{bmatrix}
  7611. \begin{bmatrix}
  7612. X\\
  7613. Y\\
  7614. \end{bmatrix}
  7615. +
  7616. \begin{bmatrix}
  7617. b_1\\
  7618. b_2\\
  7619. \end{bmatrix}
  7620. \)</div>
  7621. <dl>
  7622. <dt><span class="paramLabel">Parameters:</span></dt>
  7623. <dd><code>from</code> - First input 2D point set containing \((X,Y)\).</dd>
  7624. <dd><code>to</code> - Second input 2D point set containing \((x,y)\).</dd>
  7625. <dd><code>inliers</code> - Output vector indicating which points are inliers (1-inlier, 0-outlier).</dd>
  7626. <dd><code>method</code> - Robust method used to compute transformation. The following methods are possible:
  7627. <ul>
  7628. <li>
  7629. REF: RANSAC - RANSAC-based robust method
  7630. </li>
  7631. <li>
  7632. REF: LMEDS - Least-Median robust method
  7633. RANSAC is the default method.
  7634. </li>
  7635. </ul></dd>
  7636. <dd><code>ransacReprojThreshold</code> - Maximum reprojection error in the RANSAC algorithm to consider
  7637. a point as an inlier. Applies only to RANSAC.</dd>
  7638. <dd><code>maxIters</code> - The maximum number of robust method iterations.</dd>
  7639. <dd><code>confidence</code> - Confidence level, between 0 and 1, for the estimated transformation. Anything
  7640. between 0.95 and 0.99 is usually good enough. Values too close to 1 can slow down the estimation
  7641. significantly. Values lower than 0.8-0.9 can result in an incorrectly estimated transformation.
The number of refining iterations keeps its default value; passing 0 would disable refining, so the output matrix
would then be the output of the robust method.</dd>
  7643. <dt><span class="returnLabel">Returns:</span></dt>
  7644. <dd>Output 2D affine transformation matrix \(2 \times 3\) or empty matrix if transformation
  7645. could not be estimated. The returned matrix has the following form:
  7646. \(
  7647. \begin{bmatrix}
  7648. a_{11} &amp; a_{12} &amp; b_1\\
  7649. a_{21} &amp; a_{22} &amp; b_2\\
  7650. \end{bmatrix}
  7651. \)
  7652. The function estimates an optimal 2D affine transformation between two 2D point sets using the
  7653. selected robust algorithm.
  7654. The computed transformation is then refined further (using only inliers) with the
  7655. Levenberg-Marquardt method to reduce the re-projection error even more.
  7656. <b>Note:</b>
  7657. The RANSAC method can handle practically any ratio of outliers but needs a threshold to
  7658. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  7659. correctly only when there are more than 50% of inliers.
  7660. SEE: estimateAffinePartial2D, getAffineTransform</dd>
  7661. </dl>
  7662. </li>
  7663. </ul>
  7664. <a name="estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-long-double-long-">
  7665. <!-- -->
  7666. </a>
  7667. <ul class="blockList">
  7668. <li class="blockList">
  7669. <h4>estimateAffine2D</h4>
  7670. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffine2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
  7671. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
  7672. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  7673. int&nbsp;method,
  7674. double&nbsp;ransacReprojThreshold,
  7675. long&nbsp;maxIters,
  7676. double&nbsp;confidence,
  7677. long&nbsp;refineIters)</pre>
  7678. <div class="block">Computes an optimal affine transformation between two 2D point sets.
  7679. It computes
  7680. \(
  7681. \begin{bmatrix}
  7682. x\\
  7683. y\\
  7684. \end{bmatrix}
  7685. =
  7686. \begin{bmatrix}
  7687. a_{11} &amp; a_{12}\\
  7688. a_{21} &amp; a_{22}\\
  7689. \end{bmatrix}
  7690. \begin{bmatrix}
  7691. X\\
  7692. Y\\
  7693. \end{bmatrix}
  7694. +
  7695. \begin{bmatrix}
  7696. b_1\\
  7697. b_2\\
  7698. \end{bmatrix}
  7699. \)</div>
  7700. <dl>
  7701. <dt><span class="paramLabel">Parameters:</span></dt>
  7702. <dd><code>from</code> - First input 2D point set containing \((X,Y)\).</dd>
  7703. <dd><code>to</code> - Second input 2D point set containing \((x,y)\).</dd>
  7704. <dd><code>inliers</code> - Output vector indicating which points are inliers (1-inlier, 0-outlier).</dd>
  7705. <dd><code>method</code> - Robust method used to compute transformation. The following methods are possible:
  7706. <ul>
  7707. <li>
  7708. REF: RANSAC - RANSAC-based robust method
  7709. </li>
  7710. <li>
  7711. REF: LMEDS - Least-Median robust method
  7712. RANSAC is the default method.
  7713. </li>
  7714. </ul></dd>
  7715. <dd><code>ransacReprojThreshold</code> - Maximum reprojection error in the RANSAC algorithm to consider
  7716. a point as an inlier. Applies only to RANSAC.</dd>
  7717. <dd><code>maxIters</code> - The maximum number of robust method iterations.</dd>
  7718. <dd><code>confidence</code> - Confidence level, between 0 and 1, for the estimated transformation. Anything
  7719. between 0.95 and 0.99 is usually good enough. Values too close to 1 can slow down the estimation
  7720. significantly. Values lower than 0.8-0.9 can result in an incorrectly estimated transformation.</dd>
  7721. <dd><code>refineIters</code> - Maximum number of iterations of refining algorithm (Levenberg-Marquardt).
Passing 0 will disable refining, so the output matrix will be the output of the robust method.</dd>
  7723. <dt><span class="returnLabel">Returns:</span></dt>
  7724. <dd>Output 2D affine transformation matrix \(2 \times 3\) or empty matrix if transformation
  7725. could not be estimated. The returned matrix has the following form:
  7726. \(
  7727. \begin{bmatrix}
  7728. a_{11} &amp; a_{12} &amp; b_1\\
  7729. a_{21} &amp; a_{22} &amp; b_2\\
  7730. \end{bmatrix}
  7731. \)
  7732. The function estimates an optimal 2D affine transformation between two 2D point sets using the
  7733. selected robust algorithm.
  7734. The computed transformation is then refined further (using only inliers) with the
  7735. Levenberg-Marquardt method to reduce the re-projection error even more.
  7736. <b>Note:</b>
  7737. The RANSAC method can handle practically any ratio of outliers but needs a threshold to
  7738. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  7739. correctly only when there are more than 50% of inliers.
  7740. SEE: estimateAffinePartial2D, getAffineTransform</dd>
  7741. </dl>
  7742. </li>
  7743. </ul>
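<div class="block"><b>Usage sketch (not part of the generated reference):</b> fitting a 2D affine transform to four
synthetic correspondences with RANSAC, using the full overload documented above. All point values are made up.</div>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;

public class EstimateAffine2DExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // (X, Y) source points and their (x, y) images under an exact affine map.
        MatOfPoint2f from = new MatOfPoint2f(
                new Point(0, 0), new Point(1, 0), new Point(0, 1), new Point(1, 1));
        MatOfPoint2f to = new MatOfPoint2f(
                new Point(10, 20), new Point(12, 21), new Point(9, 23), new Point(11, 24));

        Mat inliers = new Mat();
        // RANSAC, 3.0 reprojection threshold, 2000 iterations, 0.99 confidence,
        // 10 Levenberg-Marquardt refinement iterations.
        Mat A = Calib3d.estimateAffine2D(from, to, inliers,
                Calib3d.RANSAC, 3.0, 2000, 0.99, 10);

        System.out.println("2x3 affine matrix:\n" + A.dump());   // ~[2 -1 10; 1 3 20]
        System.out.println("inlier mask:\n" + inliers.dump());   // all four points are inliers
    }
}
</pre>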
  7744. <a name="estimateAffine2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.calib3d.UsacParams-">
  7745. <!-- -->
  7746. </a>
  7747. <ul class="blockList">
  7748. <li class="blockList">
  7749. <h4>estimateAffine2D</h4>
  7750. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffine2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;pts1,
  7751. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;pts2,
  7752. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  7753. <a href="../../../org/opencv/calib3d/UsacParams.html" title="class in org.opencv.calib3d">UsacParams</a>&nbsp;params)</pre>
  7754. </li>
  7755. </ul>
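<div class="block"><b>Usage sketch (not part of the generated reference):</b> the USAC-based overload above takes a
<code>UsacParams</code> object instead of individual RANSAC settings. The sketch below assumes a default-constructed
<code>UsacParams</code> carries reasonable settings; adjust it as needed for your data.</div>
<pre>
// Assumes the OpenCV native library is already loaded
// (System.loadLibrary(Core.NATIVE_LIBRARY_NAME)) and the usual
// org.opencv.core / org.opencv.calib3d imports.
MatOfPoint2f pts1 = new MatOfPoint2f(
        new Point(0, 0), new Point(1, 0), new Point(0, 1), new Point(1, 1));
MatOfPoint2f pts2 = new MatOfPoint2f(
        new Point(10, 20), new Point(12, 21), new Point(9, 23), new Point(11, 24));

Mat inliers = new Mat();
UsacParams params = new UsacParams();   // default USAC settings
Mat A = Calib3d.estimateAffine2D(pts1, pts2, inliers, params);
System.out.println(A.dump());
</pre>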
  7756. <a name="estimateAffine3D-org.opencv.core.Mat-org.opencv.core.Mat-">
  7757. <!-- -->
  7758. </a>
  7759. <ul class="blockList">
  7760. <li class="blockList">
  7761. <h4>estimateAffine3D</h4>
  7762. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffine3D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  7763. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst)</pre>
  7764. <div class="block">Computes an optimal affine transformation between two 3D point sets.
It computes \(R,s,t\) minimizing \(\sum_{i} \| dst_i - (s \cdot R \cdot src_i + t) \|^2\),
where \(R\) is a 3x3 rotation matrix, \(t\) is a 3x1 translation vector and \(s\) is a
scalar scale factor. This is an implementation of the algorithm by Umeyama \cite umeyama1991least .
The estimated affine transform has a homogeneous (uniform) scale, which makes it a similarity transform, a subclass
of affine transformations with 7 degrees of freedom. The paired point sets need to comprise at least 3
points each.</div>
  7771. <dl>
  7772. <dt><span class="paramLabel">Parameters:</span></dt>
  7773. <dd><code>src</code> - First input 3D point set.</dd>
  7774. <dd><code>dst</code> - Second input 3D point set.
The optional scale and force_rotation parameters keep their default behaviour; see the overloads below for their
meaning.</dd>
  7778. <dt><span class="returnLabel">Returns:</span></dt>
  7779. <dd>3D affine transformation matrix \(3 \times 4\) of the form
  7780. \(T =
  7781. \begin{bmatrix}
  7782. R &amp; t\\
  7783. \end{bmatrix}
  7784. \)</dd>
  7785. </dl>
  7786. </li>
  7787. </ul>
  7788. <a name="estimateAffine3D-org.opencv.core.Mat-org.opencv.core.Mat-double:A-">
  7789. <!-- -->
  7790. </a>
  7791. <ul class="blockList">
  7792. <li class="blockList">
  7793. <h4>estimateAffine3D</h4>
  7794. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffine3D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  7795. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  7796. double[]&nbsp;scale)</pre>
  7797. <div class="block">Computes an optimal affine transformation between two 3D point sets.
It computes \(R,s,t\) minimizing \(\sum_{i} \| dst_i - (s \cdot R \cdot src_i + t) \|^2\),
where \(R\) is a 3x3 rotation matrix, \(t\) is a 3x1 translation vector and \(s\) is a
scalar scale factor. This is an implementation of the algorithm by Umeyama \cite umeyama1991least .
The estimated affine transform has a homogeneous (uniform) scale, which makes it a similarity transform, a subclass
of affine transformations with 7 degrees of freedom. The paired point sets need to comprise at least 3
points each.</div>
  7804. <dl>
  7805. <dt><span class="paramLabel">Parameters:</span></dt>
  7806. <dd><code>src</code> - First input 3D point set.</dd>
  7807. <dd><code>dst</code> - Second input 3D point set.</dd>
  7808. <dd><code>scale</code> - If null is passed, the scale parameter c will be assumed to be 1.0.
  7809. Else the pointed-to variable will be set to the optimal scale.
The force_rotation parameter keeps its default behaviour; see the overload below for its meaning.</dd>
  7812. <dt><span class="returnLabel">Returns:</span></dt>
  7813. <dd>3D affine transformation matrix \(3 \times 4\) of the form
  7814. \(T =
  7815. \begin{bmatrix}
  7816. R &amp; t\\
  7817. \end{bmatrix}
  7818. \)</dd>
  7819. </dl>
  7820. </li>
  7821. </ul>
  7822. <a name="estimateAffine3D-org.opencv.core.Mat-org.opencv.core.Mat-double:A-boolean-">
  7823. <!-- -->
  7824. </a>
  7825. <ul class="blockList">
  7826. <li class="blockList">
  7827. <h4>estimateAffine3D</h4>
  7828. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffine3D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  7829. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  7830. double[]&nbsp;scale,
  7831. boolean&nbsp;force_rotation)</pre>
  7832. <div class="block">Computes an optimal affine transformation between two 3D point sets.
It computes \(R,s,t\) minimizing \(\sum_{i} \| dst_i - (s \cdot R \cdot src_i + t) \|^2\),
where \(R\) is a 3x3 rotation matrix, \(t\) is a 3x1 translation vector and \(s\) is a
scalar scale factor. This is an implementation of the algorithm by Umeyama \cite umeyama1991least .
The estimated affine transform has a homogeneous (uniform) scale, which makes it a similarity transform, a subclass
of affine transformations with 7 degrees of freedom. The paired point sets need to comprise at least 3
points each.</div>
  7839. <dl>
  7840. <dt><span class="paramLabel">Parameters:</span></dt>
  7841. <dd><code>src</code> - First input 3D point set.</dd>
  7842. <dd><code>dst</code> - Second input 3D point set.</dd>
  7843. <dd><code>scale</code> - If null is passed, the scale parameter c will be assumed to be 1.0.
  7844. Else the pointed-to variable will be set to the optimal scale.</dd>
  7845. <dd><code>force_rotation</code> - If true, the returned rotation will never be a reflection.
  7846. This might be unwanted, e.g. when optimizing a transform between a right- and a
  7847. left-handed coordinate system.</dd>
  7848. <dt><span class="returnLabel">Returns:</span></dt>
  7849. <dd>3D affine transformation matrix \(3 \times 4\) of the form
  7850. \(T =
  7851. \begin{bmatrix}
  7852. R &amp; t\\
  7853. \end{bmatrix}
  7854. \)</dd>
  7855. </dl>
  7856. </li>
  7857. </ul>
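<div class="block"><b>Usage sketch (not part of the generated reference):</b> recovering rotation, translation and a
uniform scale between two synthetic 3D point sets with the Umeyama-based overload documented above. The second set is
simply the first one scaled by 2 and translated by (5, 5, 5).</div>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;

public class EstimateAffine3DUmeyamaExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        MatOfPoint3f src = new MatOfPoint3f(
                new Point3(0, 0, 0), new Point3(1, 0, 0),
                new Point3(0, 1, 0), new Point3(0, 0, 1));
        // dst = 2 * src + (5, 5, 5): identity rotation, scale 2, translation (5, 5, 5).
        MatOfPoint3f dst = new MatOfPoint3f(
                new Point3(5, 5, 5), new Point3(7, 5, 5),
                new Point3(5, 7, 5), new Point3(5, 5, 7));

        double[] scale = new double[1];
        // force_rotation = true: the returned R is a proper rotation (no reflection).
        Mat T = Calib3d.estimateAffine3D(src, dst, scale, true);

        System.out.println("3x4 [R|t]:\n" + T.dump());
        System.out.println("estimated scale: " + scale[0]); // expected to be close to 2.0
    }
}
</pre>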
  7858. <a name="estimateAffine3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  7859. <!-- -->
  7860. </a>
  7861. <ul class="blockList">
  7862. <li class="blockList">
  7863. <h4>estimateAffine3D</h4>
  7864. <pre>public static&nbsp;int&nbsp;estimateAffine3D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  7865. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  7866. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;out,
  7867. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers)</pre>
  7868. <div class="block">Computes an optimal affine transformation between two 3D point sets.
  7869. It computes
  7870. \(
  7871. \begin{bmatrix}
  7872. x\\
  7873. y\\
  7874. z\\
  7875. \end{bmatrix}
  7876. =
  7877. \begin{bmatrix}
  7878. a_{11} &amp; a_{12} &amp; a_{13}\\
  7879. a_{21} &amp; a_{22} &amp; a_{23}\\
  7880. a_{31} &amp; a_{32} &amp; a_{33}\\
  7881. \end{bmatrix}
  7882. \begin{bmatrix}
  7883. X\\
  7884. Y\\
  7885. Z\\
  7886. \end{bmatrix}
  7887. +
  7888. \begin{bmatrix}
  7889. b_1\\
  7890. b_2\\
  7891. b_3\\
  7892. \end{bmatrix}
  7893. \)</div>
  7894. <dl>
  7895. <dt><span class="paramLabel">Parameters:</span></dt>
  7896. <dd><code>src</code> - First input 3D point set containing \((X,Y,Z)\).</dd>
  7897. <dd><code>dst</code> - Second input 3D point set containing \((x,y,z)\).</dd>
  7898. <dd><code>out</code> - Output 3D affine transformation matrix \(3 \times 4\) of the form
  7899. \(
  7900. \begin{bmatrix}
  7901. a_{11} &amp; a_{12} &amp; a_{13} &amp; b_1\\
  7902. a_{21} &amp; a_{22} &amp; a_{23} &amp; b_2\\
  7903. a_{31} &amp; a_{32} &amp; a_{33} &amp; b_3\\
  7904. \end{bmatrix}
  7905. \)</dd>
<dd><code>inliers</code> - Output vector indicating which points are inliers (1-inlier, 0-outlier).
The RANSAC reprojection threshold and the confidence level keep their default values: a confidence between 0.95 and
0.99 is usually good enough, values too close to 1 can slow down the estimation significantly, and values lower than
0.8-0.9 can result in an incorrectly estimated transformation.
The function estimates an optimal 3D affine transformation between two 3D point sets using the
RANSAC algorithm.</dd>
  7912. <dt><span class="returnLabel">Returns:</span></dt>
  7913. <dd>automatically generated</dd>
  7914. </dl>
  7915. </li>
  7916. </ul>
  7917. <a name="estimateAffine3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-">
  7918. <!-- -->
  7919. </a>
  7920. <ul class="blockList">
  7921. <li class="blockList">
  7922. <h4>estimateAffine3D</h4>
  7923. <pre>public static&nbsp;int&nbsp;estimateAffine3D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  7924. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  7925. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;out,
  7926. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  7927. double&nbsp;ransacThreshold)</pre>
  7928. <div class="block">Computes an optimal affine transformation between two 3D point sets.
  7929. It computes
  7930. \(
  7931. \begin{bmatrix}
  7932. x\\
  7933. y\\
  7934. z\\
  7935. \end{bmatrix}
  7936. =
  7937. \begin{bmatrix}
  7938. a_{11} &amp; a_{12} &amp; a_{13}\\
  7939. a_{21} &amp; a_{22} &amp; a_{23}\\
  7940. a_{31} &amp; a_{32} &amp; a_{33}\\
  7941. \end{bmatrix}
  7942. \begin{bmatrix}
  7943. X\\
  7944. Y\\
  7945. Z\\
  7946. \end{bmatrix}
  7947. +
  7948. \begin{bmatrix}
  7949. b_1\\
  7950. b_2\\
  7951. b_3\\
  7952. \end{bmatrix}
  7953. \)</div>
  7954. <dl>
  7955. <dt><span class="paramLabel">Parameters:</span></dt>
  7956. <dd><code>src</code> - First input 3D point set containing \((X,Y,Z)\).</dd>
  7957. <dd><code>dst</code> - Second input 3D point set containing \((x,y,z)\).</dd>
  7958. <dd><code>out</code> - Output 3D affine transformation matrix \(3 \times 4\) of the form
  7959. \(
  7960. \begin{bmatrix}
  7961. a_{11} &amp; a_{12} &amp; a_{13} &amp; b_1\\
  7962. a_{21} &amp; a_{22} &amp; a_{23} &amp; b_2\\
  7963. a_{31} &amp; a_{32} &amp; a_{33} &amp; b_3\\
  7964. \end{bmatrix}
  7965. \)</dd>
  7966. <dd><code>inliers</code> - Output vector indicating which points are inliers (1-inlier, 0-outlier).</dd>
  7967. <dd><code>ransacThreshold</code> - Maximum reprojection error in the RANSAC algorithm to consider a point as
  7968. an inlier.
The confidence level keeps its default value: anything between 0.95 and 0.99 is usually good enough; values too close
to 1 can slow down the estimation significantly, and values lower than 0.8-0.9 can result in an incorrectly estimated
transformation.
  7971. The function estimates an optimal 3D affine transformation between two 3D point sets using the
  7972. RANSAC algorithm.</dd>
  7973. <dt><span class="returnLabel">Returns:</span></dt>
  7974. <dd>automatically generated</dd>
  7975. </dl>
  7976. </li>
  7977. </ul>
  7978. <a name="estimateAffine3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-double-">
  7979. <!-- -->
  7980. </a>
  7981. <ul class="blockList">
  7982. <li class="blockList">
  7983. <h4>estimateAffine3D</h4>
  7984. <pre>public static&nbsp;int&nbsp;estimateAffine3D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  7985. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  7986. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;out,
  7987. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  7988. double&nbsp;ransacThreshold,
  7989. double&nbsp;confidence)</pre>
  7990. <div class="block">Computes an optimal affine transformation between two 3D point sets.
  7991. It computes
  7992. \(
  7993. \begin{bmatrix}
  7994. x\\
  7995. y\\
  7996. z\\
  7997. \end{bmatrix}
  7998. =
  7999. \begin{bmatrix}
  8000. a_{11} &amp; a_{12} &amp; a_{13}\\
  8001. a_{21} &amp; a_{22} &amp; a_{23}\\
  8002. a_{31} &amp; a_{32} &amp; a_{33}\\
  8003. \end{bmatrix}
  8004. \begin{bmatrix}
  8005. X\\
  8006. Y\\
  8007. Z\\
  8008. \end{bmatrix}
  8009. +
  8010. \begin{bmatrix}
  8011. b_1\\
  8012. b_2\\
  8013. b_3\\
  8014. \end{bmatrix}
  8015. \)</div>
  8016. <dl>
  8017. <dt><span class="paramLabel">Parameters:</span></dt>
  8018. <dd><code>src</code> - First input 3D point set containing \((X,Y,Z)\).</dd>
  8019. <dd><code>dst</code> - Second input 3D point set containing \((x,y,z)\).</dd>
  8020. <dd><code>out</code> - Output 3D affine transformation matrix \(3 \times 4\) of the form
  8021. \(
  8022. \begin{bmatrix}
  8023. a_{11} &amp; a_{12} &amp; a_{13} &amp; b_1\\
  8024. a_{21} &amp; a_{22} &amp; a_{23} &amp; b_2\\
  8025. a_{31} &amp; a_{32} &amp; a_{33} &amp; b_3\\
  8026. \end{bmatrix}
  8027. \)</dd>
  8028. <dd><code>inliers</code> - Output vector indicating which points are inliers (1-inlier, 0-outlier).</dd>
  8029. <dd><code>ransacThreshold</code> - Maximum reprojection error in the RANSAC algorithm to consider a point as
  8030. an inlier.</dd>
  8031. <dd><code>confidence</code> - Confidence level, between 0 and 1, for the estimated transformation. Anything
  8032. between 0.95 and 0.99 is usually good enough. Values too close to 1 can slow down the estimation
  8033. significantly. Values lower than 0.8-0.9 can result in an incorrectly estimated transformation.
  8034. The function estimates an optimal 3D affine transformation between two 3D point sets using the
  8035. RANSAC algorithm.</dd>
  8036. <dt><span class="returnLabel">Returns:</span></dt>
  8037. <dd>automatically generated</dd>
  8038. </dl>
  8039. </li>
  8040. </ul>
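<div class="block"><b>Usage sketch (not part of the generated reference):</b> robustly fitting a full 3D affine
transform with RANSAC, using the overload documented above. The point sets are synthetic: dst is src translated by
(1, 2, 3), so every correspondence should come out as an inlier.</div>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;

public class EstimateAffine3DRansacExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        MatOfPoint3f src = new MatOfPoint3f(
                new Point3(0, 0, 0), new Point3(1, 0, 0), new Point3(0, 1, 0),
                new Point3(0, 0, 1), new Point3(1, 1, 1));
        MatOfPoint3f dst = new MatOfPoint3f(
                new Point3(1, 2, 3), new Point3(2, 2, 3), new Point3(1, 3, 3),
                new Point3(1, 2, 4), new Point3(2, 3, 4));

        Mat out = new Mat();      // receives the 3x4 affine matrix
        Mat inliers = new Mat();  // receives the inlier mask (1-inlier, 0-outlier)
        int ret = Calib3d.estimateAffine3D(src, dst, out, inliers, 3.0, 0.99);

        System.out.println("return code: " + ret);
        System.out.println("3x4 affine matrix:\n" + out.dump());
        System.out.println("inlier mask:\n" + inliers.dump());
    }
}
</pre>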
  8041. <a name="estimateAffinePartial2D-org.opencv.core.Mat-org.opencv.core.Mat-">
  8042. <!-- -->
  8043. </a>
  8044. <ul class="blockList">
  8045. <li class="blockList">
  8046. <h4>estimateAffinePartial2D</h4>
  8047. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffinePartial2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
  8048. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to)</pre>
  8049. <div class="block">Computes an optimal limited affine transformation with 4 degrees of freedom between
  8050. two 2D point sets.</div>
  8051. <dl>
  8052. <dt><span class="paramLabel">Parameters:</span></dt>
  8053. <dd><code>from</code> - First input 2D point set.</dd>
  8054. <dd><code>to</code> - Second input 2D point set.
  8055. <ul>
  8056. <li>
  8057. REF: RANSAC - RANSAC-based robust method
  8058. </li>
  8059. <li>
  8060. REF: LMEDS - Least-Median robust method
  8061. RANSAC is the default method.
  8062. </li>
  8063. </ul>
The remaining optional parameters keep their default values. The reprojection threshold, which decides whether a
point is counted as an inlier, applies only to RANSAC. A confidence level between 0.95 and 0.99 is usually good
enough; values too close to 1 can slow down the estimation significantly, and values lower than 0.8-0.9 can result
in an incorrectly estimated transformation. Passing 0 refining iterations disables refining, so the output matrix is
then the output of the robust method.</dd>
  8068. <dt><span class="returnLabel">Returns:</span></dt>
  8069. <dd>Output 2D affine transformation (4 degrees of freedom) matrix \(2 \times 3\) or
  8070. empty matrix if transformation could not be estimated.
  8071. The function estimates an optimal 2D affine transformation with 4 degrees of freedom limited to
  8072. combinations of translation, rotation, and uniform scaling. Uses the selected algorithm for robust
  8073. estimation.
  8074. The computed transformation is then refined further (using only inliers) with the
  8075. Levenberg-Marquardt method to reduce the re-projection error even more.
  8076. Estimated transformation matrix is:
  8077. \( \begin{bmatrix} \cos(\theta) \cdot s &amp; -\sin(\theta) \cdot s &amp; t_x \\
  8078. \sin(\theta) \cdot s &amp; \cos(\theta) \cdot s &amp; t_y
  8079. \end{bmatrix} \)
  8080. Where \( \theta \) is the rotation angle, \( s \) the scaling factor and \( t_x, t_y \) are
  8081. translations in \( x, y \) axes respectively.
  8082. <b>Note:</b>
The RANSAC method can handle practically any ratio of outliers but needs a threshold to
  8084. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  8085. correctly only when there are more than 50% of inliers.
  8086. SEE: estimateAffine2D, getAffineTransform</dd>
  8087. </dl>
  8088. </li>
  8089. </ul>
  8090. <a name="estimateAffinePartial2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  8091. <!-- -->
  8092. </a>
  8093. <ul class="blockList">
  8094. <li class="blockList">
  8095. <h4>estimateAffinePartial2D</h4>
  8096. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffinePartial2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
  8097. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
  8098. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers)</pre>
  8099. <div class="block">Computes an optimal limited affine transformation with 4 degrees of freedom between
  8100. two 2D point sets.</div>
  8101. <dl>
  8102. <dt><span class="paramLabel">Parameters:</span></dt>
  8103. <dd><code>from</code> - First input 2D point set.</dd>
  8104. <dd><code>to</code> - Second input 2D point set.</dd>
  8105. <dd><code>inliers</code> - Output vector indicating which points are inliers.
  8106. <ul>
  8107. <li>
  8108. REF: RANSAC - RANSAC-based robust method
  8109. </li>
  8110. <li>
  8111. REF: LMEDS - Least-Median robust method
  8112. RANSAC is the default method.
  8113. </li>
  8114. </ul>
The remaining optional parameters keep their default values. The reprojection threshold, which decides whether a
point is counted as an inlier, applies only to RANSAC. A confidence level between 0.95 and 0.99 is usually good
enough; values too close to 1 can slow down the estimation significantly, and values lower than 0.8-0.9 can result
in an incorrectly estimated transformation. Passing 0 refining iterations disables refining, so the output matrix is
then the output of the robust method.</dd>
  8119. <dt><span class="returnLabel">Returns:</span></dt>
  8120. <dd>Output 2D affine transformation (4 degrees of freedom) matrix \(2 \times 3\) or
  8121. empty matrix if transformation could not be estimated.
  8122. The function estimates an optimal 2D affine transformation with 4 degrees of freedom limited to
  8123. combinations of translation, rotation, and uniform scaling. Uses the selected algorithm for robust
  8124. estimation.
  8125. The computed transformation is then refined further (using only inliers) with the
  8126. Levenberg-Marquardt method to reduce the re-projection error even more.
  8127. Estimated transformation matrix is:
  8128. \( \begin{bmatrix} \cos(\theta) \cdot s &amp; -\sin(\theta) \cdot s &amp; t_x \\
  8129. \sin(\theta) \cdot s &amp; \cos(\theta) \cdot s &amp; t_y
  8130. \end{bmatrix} \)
  8131. Where \( \theta \) is the rotation angle, \( s \) the scaling factor and \( t_x, t_y \) are
  8132. translations in \( x, y \) axes respectively.
  8133. <b>Note:</b>
The RANSAC method can handle practically any ratio of outliers but needs a threshold to
  8135. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  8136. correctly only when there are more than 50% of inliers.
  8137. SEE: estimateAffine2D, getAffineTransform</dd>
  8138. </dl>
  8139. </li>
  8140. </ul>
  8141. <a name="estimateAffinePartial2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">
  8142. <!-- -->
  8143. </a>
  8144. <ul class="blockList">
  8145. <li class="blockList">
  8146. <h4>estimateAffinePartial2D</h4>
  8147. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffinePartial2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
  8148. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
  8149. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  8150. int&nbsp;method)</pre>
  8151. <div class="block">Computes an optimal limited affine transformation with 4 degrees of freedom between
  8152. two 2D point sets.</div>
  8153. <dl>
  8154. <dt><span class="paramLabel">Parameters:</span></dt>
  8155. <dd><code>from</code> - First input 2D point set.</dd>
  8156. <dd><code>to</code> - Second input 2D point set.</dd>
  8157. <dd><code>inliers</code> - Output vector indicating which points are inliers.</dd>
  8158. <dd><code>method</code> - Robust method used to compute transformation. The following methods are possible:
  8159. <ul>
  8160. <li>
  8161. REF: RANSAC - RANSAC-based robust method
  8162. </li>
  8163. <li>
  8164. REF: LMEDS - Least-Median robust method
  8165. RANSAC is the default method.
  8166. </li>
  8167. </ul>
The remaining optional parameters keep their default values. The reprojection threshold, which decides whether a
point is counted as an inlier, applies only to RANSAC. A confidence level between 0.95 and 0.99 is usually good
enough; values too close to 1 can slow down the estimation significantly, and values lower than 0.8-0.9 can result
in an incorrectly estimated transformation. Passing 0 refining iterations disables refining, so the output matrix is
then the output of the robust method.</dd>
  8172. <dt><span class="returnLabel">Returns:</span></dt>
  8173. <dd>Output 2D affine transformation (4 degrees of freedom) matrix \(2 \times 3\) or
  8174. empty matrix if transformation could not be estimated.
  8175. The function estimates an optimal 2D affine transformation with 4 degrees of freedom limited to
  8176. combinations of translation, rotation, and uniform scaling. Uses the selected algorithm for robust
  8177. estimation.
  8178. The computed transformation is then refined further (using only inliers) with the
  8179. Levenberg-Marquardt method to reduce the re-projection error even more.
  8180. Estimated transformation matrix is:
  8181. \( \begin{bmatrix} \cos(\theta) \cdot s &amp; -\sin(\theta) \cdot s &amp; t_x \\
  8182. \sin(\theta) \cdot s &amp; \cos(\theta) \cdot s &amp; t_y
  8183. \end{bmatrix} \)
  8184. Where \( \theta \) is the rotation angle, \( s \) the scaling factor and \( t_x, t_y \) are
  8185. translations in \( x, y \) axes respectively.
  8186. <b>Note:</b>
The RANSAC method can handle practically any ratio of outliers but needs a threshold to
  8188. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  8189. correctly only when there are more than 50% of inliers.
  8190. SEE: estimateAffine2D, getAffineTransform</dd>
  8191. </dl>
  8192. </li>
  8193. </ul>
  8194. <a name="estimateAffinePartial2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-">
  8195. <!-- -->
  8196. </a>
  8197. <ul class="blockList">
  8198. <li class="blockList">
  8199. <h4>estimateAffinePartial2D</h4>
  8200. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffinePartial2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
  8201. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
  8202. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  8203. int&nbsp;method,
  8204. double&nbsp;ransacReprojThreshold)</pre>
  8205. <div class="block">Computes an optimal limited affine transformation with 4 degrees of freedom between
  8206. two 2D point sets.</div>
  8207. <dl>
  8208. <dt><span class="paramLabel">Parameters:</span></dt>
  8209. <dd><code>from</code> - First input 2D point set.</dd>
  8210. <dd><code>to</code> - Second input 2D point set.</dd>
  8211. <dd><code>inliers</code> - Output vector indicating which points are inliers.</dd>
  8212. <dd><code>method</code> - Robust method used to compute transformation. The following methods are possible:
  8213. <ul>
  8214. <li>
  8215. REF: RANSAC - RANSAC-based robust method
  8216. </li>
  8217. <li>
  8218. REF: LMEDS - Least-Median robust method
  8219. RANSAC is the default method.
  8220. </li>
  8221. </ul></dd>
  8222. <dd><code>ransacReprojThreshold</code> - Maximum reprojection error in the RANSAC algorithm to consider
  8223. a point as an inlier. Applies only to RANSAC.
The remaining optional parameters keep their default values. A confidence level between 0.95 and 0.99 is usually good
enough; values too close to 1 can slow down the estimation significantly, and values lower than 0.8-0.9 can result in
an incorrectly estimated transformation. Passing 0 refining iterations disables refining, so the output matrix is then
the output of the robust method.</dd>
  8227. <dt><span class="returnLabel">Returns:</span></dt>
  8228. <dd>Output 2D affine transformation (4 degrees of freedom) matrix \(2 \times 3\) or
  8229. empty matrix if transformation could not be estimated.
  8230. The function estimates an optimal 2D affine transformation with 4 degrees of freedom limited to
  8231. combinations of translation, rotation, and uniform scaling. Uses the selected algorithm for robust
  8232. estimation.
  8233. The computed transformation is then refined further (using only inliers) with the
  8234. Levenberg-Marquardt method to reduce the re-projection error even more.
  8235. Estimated transformation matrix is:
  8236. \( \begin{bmatrix} \cos(\theta) \cdot s &amp; -\sin(\theta) \cdot s &amp; t_x \\
  8237. \sin(\theta) \cdot s &amp; \cos(\theta) \cdot s &amp; t_y
  8238. \end{bmatrix} \)
  8239. Where \( \theta \) is the rotation angle, \( s \) the scaling factor and \( t_x, t_y \) are
  8240. translations in \( x, y \) axes respectively.
  8241. <b>Note:</b>
The RANSAC method can handle practically any ratio of outliers but needs a threshold to
  8243. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  8244. correctly only when there are more than 50% of inliers.
  8245. SEE: estimateAffine2D, getAffineTransform</dd>
  8246. </dl>
  8247. </li>
  8248. </ul>
  8249. <a name="estimateAffinePartial2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-long-">
  8250. <!-- -->
  8251. </a>
  8252. <ul class="blockList">
  8253. <li class="blockList">
  8254. <h4>estimateAffinePartial2D</h4>
  8255. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffinePartial2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
  8256. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
  8257. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  8258. int&nbsp;method,
  8259. double&nbsp;ransacReprojThreshold,
  8260. long&nbsp;maxIters)</pre>
  8261. <div class="block">Computes an optimal limited affine transformation with 4 degrees of freedom between
  8262. two 2D point sets.</div>
  8263. <dl>
  8264. <dt><span class="paramLabel">Parameters:</span></dt>
  8265. <dd><code>from</code> - First input 2D point set.</dd>
  8266. <dd><code>to</code> - Second input 2D point set.</dd>
  8267. <dd><code>inliers</code> - Output vector indicating which points are inliers.</dd>
  8268. <dd><code>method</code> - Robust method used to compute transformation. The following methods are possible:
  8269. <ul>
  8270. <li>
  8271. REF: RANSAC - RANSAC-based robust method
  8272. </li>
  8273. <li>
  8274. REF: LMEDS - Least-Median robust method
  8275. RANSAC is the default method.
  8276. </li>
  8277. </ul></dd>
  8278. <dd><code>ransacReprojThreshold</code> - Maximum reprojection error in the RANSAC algorithm to consider
  8279. a point as an inlier. Applies only to RANSAC.</dd>
  8280. <dd><code>maxIters</code> - The maximum number of robust method iterations.
The confidence level and the number of refining iterations keep their default values: a confidence between 0.95 and
0.99 is usually good enough, values too close to 1 can slow down the estimation significantly, and values lower than
0.8-0.9 can result in an incorrectly estimated transformation. Passing 0 refining iterations disables refining, so the
output matrix is then the output of the robust method.</dd>
  8284. <dt><span class="returnLabel">Returns:</span></dt>
  8285. <dd>Output 2D affine transformation (4 degrees of freedom) matrix \(2 \times 3\) or
  8286. empty matrix if transformation could not be estimated.
  8287. The function estimates an optimal 2D affine transformation with 4 degrees of freedom limited to
  8288. combinations of translation, rotation, and uniform scaling. Uses the selected algorithm for robust
  8289. estimation.
  8290. The computed transformation is then refined further (using only inliers) with the
  8291. Levenberg-Marquardt method to reduce the re-projection error even more.
  8292. Estimated transformation matrix is:
  8293. \( \begin{bmatrix} \cos(\theta) \cdot s &amp; -\sin(\theta) \cdot s &amp; t_x \\
  8294. \sin(\theta) \cdot s &amp; \cos(\theta) \cdot s &amp; t_y
  8295. \end{bmatrix} \)
  8296. Where \( \theta \) is the rotation angle, \( s \) the scaling factor and \( t_x, t_y \) are
  8297. translations in \( x, y \) axes respectively.
  8298. <b>Note:</b>
The RANSAC method can handle practically any ratio of outliers but needs a threshold to
  8300. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  8301. correctly only when there are more than 50% of inliers.
  8302. SEE: estimateAffine2D, getAffineTransform</dd>
  8303. </dl>
  8304. </li>
  8305. </ul>
  8306. <a name="estimateAffinePartial2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-long-double-">
  8307. <!-- -->
  8308. </a>
  8309. <ul class="blockList">
  8310. <li class="blockList">
  8311. <h4>estimateAffinePartial2D</h4>
  8312. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffinePartial2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
  8313. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
  8314. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  8315. int&nbsp;method,
  8316. double&nbsp;ransacReprojThreshold,
  8317. long&nbsp;maxIters,
  8318. double&nbsp;confidence)</pre>
  8319. <div class="block">Computes an optimal limited affine transformation with 4 degrees of freedom between
  8320. two 2D point sets.</div>
  8321. <dl>
  8322. <dt><span class="paramLabel">Parameters:</span></dt>
  8323. <dd><code>from</code> - First input 2D point set.</dd>
  8324. <dd><code>to</code> - Second input 2D point set.</dd>
  8325. <dd><code>inliers</code> - Output vector indicating which points are inliers.</dd>
  8326. <dd><code>method</code> - Robust method used to compute transformation. The following methods are possible:
  8327. <ul>
  8328. <li>
  8329. REF: RANSAC - RANSAC-based robust method
  8330. </li>
  8331. <li>
  8332. REF: LMEDS - Least-Median robust method
  8333. </li>
  8334. </ul>
  8335. RANSAC is the default method.</dd>
  8336. <dd><code>ransacReprojThreshold</code> - Maximum reprojection error in the RANSAC algorithm to consider
  8337. a point as an inlier. Applies only to RANSAC.</dd>
  8338. <dd><code>maxIters</code> - The maximum number of robust method iterations.</dd>
  8339. <dd><code>confidence</code> - Confidence level, between 0 and 1, for the estimated transformation. Anything
  8340. between 0.95 and 0.99 is usually good enough. Values too close to 1 can slow down the estimation
  8341. significantly. Values lower than 0.8-0.9 can result in an incorrectly estimated transformation.
  8342. The refinement iteration count keeps its default value in this overload; passing 0 (in the overload that exposes refineIters) disables refining, so the output matrix is the plain output of the robust method.</dd>
  8343. <dt><span class="returnLabel">Returns:</span></dt>
  8344. <dd>Output 2D affine transformation (4 degrees of freedom) matrix \(2 \times 3\) or
  8345. empty matrix if transformation could not be estimated.
  8346. The function estimates an optimal 2D affine transformation with 4 degrees of freedom limited to
  8347. combinations of translation, rotation, and uniform scaling. Uses the selected algorithm for robust
  8348. estimation.
  8349. The computed transformation is then refined further (using only inliers) with the
  8350. Levenberg-Marquardt method to reduce the re-projection error even more.
  8351. Estimated transformation matrix is:
  8352. \( \begin{bmatrix} \cos(\theta) \cdot s &amp; -\sin(\theta) \cdot s &amp; t_x \\
  8353. \sin(\theta) \cdot s &amp; \cos(\theta) \cdot s &amp; t_y
  8354. \end{bmatrix} \)
  8355. Where \( \theta \) is the rotation angle, \( s \) the scaling factor and \( t_x, t_y \) are
  8356. translations in \( x, y \) axes respectively.
  8357. <b>Note:</b>
  8358. The RANSAC method can handle practically any ratio of outliers but needs a threshold to
  8359. distinguish inliers from outliers. The method LMeDS does not need any threshold, but it works
  8360. correctly only when there are more than 50% inliers.
  8361. SEE: estimateAffine2D, getAffineTransform</dd>
  8362. </dl>
  8363. </li>
  8364. </ul>
  8365. <a name="estimateAffinePartial2D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-long-double-long-">
  8366. <!-- -->
  8367. </a>
  8368. <ul class="blockList">
  8369. <li class="blockList">
  8370. <h4>estimateAffinePartial2D</h4>
  8371. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;estimateAffinePartial2D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;from,
  8372. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;to,
  8373. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  8374. int&nbsp;method,
  8375. double&nbsp;ransacReprojThreshold,
  8376. long&nbsp;maxIters,
  8377. double&nbsp;confidence,
  8378. long&nbsp;refineIters)</pre>
  8379. <div class="block">Computes an optimal limited affine transformation with 4 degrees of freedom between
  8380. two 2D point sets.</div>
  8381. <dl>
  8382. <dt><span class="paramLabel">Parameters:</span></dt>
  8383. <dd><code>from</code> - First input 2D point set.</dd>
  8384. <dd><code>to</code> - Second input 2D point set.</dd>
  8385. <dd><code>inliers</code> - Output vector indicating which points are inliers.</dd>
  8386. <dd><code>method</code> - Robust method used to compute transformation. The following methods are possible:
  8387. <ul>
  8388. <li>
  8389. REF: RANSAC - RANSAC-based robust method
  8390. </li>
  8391. <li>
  8392. REF: LMEDS - Least-Median robust method
  8393. </li>
  8394. </ul>
  8395. RANSAC is the default method.</dd>
  8396. <dd><code>ransacReprojThreshold</code> - Maximum reprojection error in the RANSAC algorithm to consider
  8397. a point as an inlier. Applies only to RANSAC.</dd>
  8398. <dd><code>maxIters</code> - The maximum number of robust method iterations.</dd>
  8399. <dd><code>confidence</code> - Confidence level, between 0 and 1, for the estimated transformation. Anything
  8400. between 0.95 and 0.99 is usually good enough. Values too close to 1 can slow down the estimation
  8401. significantly. Values lower than 0.8-0.9 can result in an incorrectly estimated transformation.</dd>
  8402. <dd><code>refineIters</code> - Maximum number of iterations of refining algorithm (Levenberg-Marquardt).
  8403. Passing 0 will disable refining, so the output matrix will be the output of the robust method.</dd>
  8404. <dt><span class="returnLabel">Returns:</span></dt>
  8405. <dd>Output 2D affine transformation (4 degrees of freedom) matrix \(2 \times 3\) or
  8406. empty matrix if transformation could not be estimated.
  8407. The function estimates an optimal 2D affine transformation with 4 degrees of freedom limited to
  8408. combinations of translation, rotation, and uniform scaling. Uses the selected algorithm for robust
  8409. estimation.
  8410. The computed transformation is then refined further (using only inliers) with the
  8411. Levenberg-Marquardt method to reduce the re-projection error even more.
  8412. Estimated transformation matrix is:
  8413. \( \begin{bmatrix} \cos(\theta) \cdot s &amp; -\sin(\theta) \cdot s &amp; t_x \\
  8414. \sin(\theta) \cdot s &amp; \cos(\theta) \cdot s &amp; t_y
  8415. \end{bmatrix} \)
  8416. Where \( \theta \) is the rotation angle, \( s \) the scaling factor and \( t_x, t_y \) are
  8417. translations in \( x, y \) axes respectively.
  8418. <b>Note:</b>
  8419. The RANSAC method can handle practically any ratio of outliers but needs a threshold to
  8420. distinguish inliers from outliers. The method LMeDS does not need any threshold, but it works
  8421. correctly only when there are more than 50% inliers. A usage sketch follows this method entry.
  8422. SEE: estimateAffine2D, getAffineTransform</dd>
  8423. </dl>
  8424. </li>
  8425. </ul>
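The following is a minimal Java sketch (illustrative only, not part of the generated reference) of calling the fullest overload above. <code>srcPoints</code> and <code>dstPoints</code> are hypothetical lists of matched 2D points, and the OpenCV native library is assumed to be already loaded:
<code>
// assumes: import java.util.List; import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
// srcPoints and dstPoints are hypothetical List&lt;Point&gt; of matched 2D correspondences
MatOfPoint2f from = new MatOfPoint2f();
from.fromList(srcPoints);
MatOfPoint2f to = new MatOfPoint2f();
to.fromList(dstPoints);

Mat inliers = new Mat();                      // receives a 0/1 flag per correspondence
Mat M = Calib3d.estimateAffinePartial2D(from, to, inliers,
        Calib3d.RANSAC,                       // robust method
        3.0,                                  // ransacReprojThreshold in pixels
        2000,                                 // maxIters
        0.99,                                 // confidence
        10);                                  // refineIters (0 disables the LM refinement)

if (!M.empty()) {
    // recover the uniform scale s and rotation angle theta from the 2x3 matrix
    double scale = Math.hypot(M.get(0, 0)[0], M.get(1, 0)[0]);
    double theta = Math.atan2(M.get(1, 0)[0], M.get(0, 0)[0]);
}
</code>
The parameter values shown are only plausible starting points; the threshold and iteration counts should be tuned for the actual data.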
  8426. <a name="estimateChessboardSharpness-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-">
  8427. <!-- -->
  8428. </a>
  8429. <ul class="blockList">
  8430. <li class="blockList">
  8431. <h4>estimateChessboardSharpness</h4>
  8432. <pre>public static&nbsp;<a href="../../../org/opencv/core/Scalar.html" title="class in org.opencv.core">Scalar</a>&nbsp;estimateChessboardSharpness(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
  8433. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
  8434. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners)</pre>
  8435. <div class="block">Estimates the sharpness of a detected chessboard.
  8436. Image sharpness, as well as brightness, is a critical parameter for accurate
  8437. camera calibration. To access these parameters for filtering out
  8438. problematic calibration images, this method calculates edge profiles by traveling from
  8439. black to white chessboard cell centers. Based on this, the number of pixels
  8440. required to transition from black to white is calculated. This width of the
  8441. transition area is a good indication of how sharply the chessboard is imaged
  8442. and should be below ~3.0 pixels.</div>
  8443. <dl>
  8444. <dt><span class="paramLabel">Parameters:</span></dt>
  8445. <dd><code>image</code> - Gray image used to find chessboard corners</dd>
  8446. <dd><code>patternSize</code> - Size of a found chessboard pattern</dd>
  8447. <dd><code>corners</code> - Corners found by #findChessboardCornersSB
  8448. The optional sharpness array is of type CV_32FC1 and has for each calculated
  8449. profile one row with the following five entries:
  8450. 0 = x coordinate of the underlying edge in the image
  8451. 1 = y coordinate of the underlying edge in the image
  8452. 2 = width of the transition area (sharpness)
  8453. 3 = signal strength in the black cell (min brightness)
  8454. 4 = signal strength in the white cell (max brightness)</dd>
  8455. <dt><span class="returnLabel">Returns:</span></dt>
  8456. <dd>Scalar(average sharpness, average min brightness, average max brightness,0)</dd>
  8457. </dl>
  8458. </li>
  8459. </ul>
  8460. <a name="estimateChessboardSharpness-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-float-">
  8461. <!-- -->
  8462. </a>
  8463. <ul class="blockList">
  8464. <li class="blockList">
  8465. <h4>estimateChessboardSharpness</h4>
  8466. <pre>public static&nbsp;<a href="../../../org/opencv/core/Scalar.html" title="class in org.opencv.core">Scalar</a>&nbsp;estimateChessboardSharpness(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
  8467. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
  8468. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners,
  8469. float&nbsp;rise_distance)</pre>
  8470. <div class="block">Estimates the sharpness of a detected chessboard.
  8471. Image sharpness, as well as brightness, is a critical parameter for accurate
  8472. camera calibration. To access these parameters for filtering out
  8473. problematic calibration images, this method calculates edge profiles by traveling from
  8474. black to white chessboard cell centers. Based on this, the number of pixels
  8475. required to transition from black to white is calculated. This width of the
  8476. transition area is a good indication of how sharply the chessboard is imaged
  8477. and should be below ~3.0 pixels.</div>
  8478. <dl>
  8479. <dt><span class="paramLabel">Parameters:</span></dt>
  8480. <dd><code>image</code> - Gray image used to find chessboard corners</dd>
  8481. <dd><code>patternSize</code> - Size of a found chessboard pattern</dd>
  8482. <dd><code>corners</code> - Corners found by #findChessboardCornersSB</dd>
  8483. <dd><code>rise_distance</code> - Rise distance; 0.8 means the transition from 10% to 90% of the final signal strength
  8484. The optional sharpness array is of type CV_32FC1 and has for each calculated
  8485. profile one row with the following five entries:
  8486. 0 = x coordinate of the underlying edge in the image
  8487. 1 = y coordinate of the underlying edge in the image
  8488. 2 = width of the transition area (sharpness)
  8489. 3 = signal strength in the black cell (min brightness)
  8490. 4 = signal strength in the white cell (max brightness)</dd>
  8491. <dt><span class="returnLabel">Returns:</span></dt>
  8492. <dd>Scalar(average sharpness, average min brightness, average max brightness,0)</dd>
  8493. </dl>
  8494. </li>
  8495. </ul>
  8496. <a name="estimateChessboardSharpness-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-float-boolean-">
  8497. <!-- -->
  8498. </a>
  8499. <ul class="blockList">
  8500. <li class="blockList">
  8501. <h4>estimateChessboardSharpness</h4>
  8502. <pre>public static&nbsp;<a href="../../../org/opencv/core/Scalar.html" title="class in org.opencv.core">Scalar</a>&nbsp;estimateChessboardSharpness(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
  8503. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
  8504. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners,
  8505. float&nbsp;rise_distance,
  8506. boolean&nbsp;vertical)</pre>
  8507. <div class="block">Estimates the sharpness of a detected chessboard.
  8508. Image sharpness, as well as brightness, is a critical parameter for accurate
  8509. camera calibration. To access these parameters for filtering out
  8510. problematic calibration images, this method calculates edge profiles by traveling from
  8511. black to white chessboard cell centers. Based on this, the number of pixels
  8512. required to transition from black to white is calculated. This width of the
  8513. transition area is a good indication of how sharply the chessboard is imaged
  8514. and should be below ~3.0 pixels.</div>
  8515. <dl>
  8516. <dt><span class="paramLabel">Parameters:</span></dt>
  8517. <dd><code>image</code> - Gray image used to find chessboard corners</dd>
  8518. <dd><code>patternSize</code> - Size of a found chessboard pattern</dd>
  8519. <dd><code>corners</code> - Corners found by #findChessboardCornersSB</dd>
  8520. <dd><code>rise_distance</code> - Rise distance; 0.8 means the transition from 10% to 90% of the final signal strength</dd>
  8521. <dd><code>vertical</code> - If set, edge responses for vertical lines are calculated instead; by default edge responses for horizontal lines are calculated
  8522. The optional sharpness array is of type CV_32FC1 and has for each calculated
  8523. profile one row with the following five entries:
  8524. 0 = x coordinate of the underlying edge in the image
  8525. 1 = y coordinate of the underlying edge in the image
  8526. 2 = width of the transition area (sharpness)
  8527. 3 = signal strength in the black cell (min brightness)
  8528. 4 = signal strength in the white cell (max brightness)</dd>
  8529. <dt><span class="returnLabel">Returns:</span></dt>
  8530. <dd>Scalar(average sharpness, average min brightness, average max brightness,0)</dd>
  8531. </dl>
  8532. </li>
  8533. </ul>
  8534. <a name="estimateChessboardSharpness-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-float-boolean-org.opencv.core.Mat-">
  8535. <!-- -->
  8536. </a>
  8537. <ul class="blockList">
  8538. <li class="blockList">
  8539. <h4>estimateChessboardSharpness</h4>
  8540. <pre>public static&nbsp;<a href="../../../org/opencv/core/Scalar.html" title="class in org.opencv.core">Scalar</a>&nbsp;estimateChessboardSharpness(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
  8541. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
  8542. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners,
  8543. float&nbsp;rise_distance,
  8544. boolean&nbsp;vertical,
  8545. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;sharpness)</pre>
  8546. <div class="block">Estimates the sharpness of a detected chessboard.
  8547. Image sharpness, as well as brightness, is a critical parameter for accurate
  8548. camera calibration. To access these parameters for filtering out
  8549. problematic calibration images, this method calculates edge profiles by traveling from
  8550. black to white chessboard cell centers. Based on this, the number of pixels
  8551. required to transition from black to white is calculated. This width of the
  8552. transition area is a good indication of how sharply the chessboard is imaged
  8553. and should be below ~3.0 pixels. A usage sketch follows this method entry.</div>
  8554. <dl>
  8555. <dt><span class="paramLabel">Parameters:</span></dt>
  8556. <dd><code>image</code> - Gray image used to find chessboard corners</dd>
  8557. <dd><code>patternSize</code> - Size of a found chessboard pattern</dd>
  8558. <dd><code>corners</code> - Corners found by #findChessboardCornersSB</dd>
  8559. <dd><code>rise_distance</code> - Rise distance; 0.8 means the transition from 10% to 90% of the final signal strength</dd>
  8560. <dd><code>vertical</code> - If set, edge responses for vertical lines are calculated instead; by default edge responses for horizontal lines are calculated</dd>
  8561. <dd><code>sharpness</code> - Optional output array with a sharpness value for calculated edge responses (see description)
  8562. The optional sharpness array is of type CV_32FC1 and has for each calculated
  8563. profile one row with the following five entries:
  8564. 0 = x coordinate of the underlying edge in the image
  8565. 1 = y coordinate of the underlying edge in the image
  8566. 2 = width of the transition area (sharpness)
  8567. 3 = signal strength in the black cell (min brightness)
  8568. 4 = signal strength in the white cell (max brightness)</dd>
  8569. <dt><span class="returnLabel">Returns:</span></dt>
  8570. <dd>Scalar(average sharpness, average min brightness, average max brightness,0)</dd>
  8571. </dl>
  8572. </li>
  8573. </ul>
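A minimal Java sketch of how sharpness estimation might be combined with #findChessboardCornersSB. <code>gray</code> is a hypothetical 8-bit grayscale calibration image, and the 3.0 px threshold follows the description above:
<code>
// assumes: import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
Size patternSize = new Size(9, 6);            // inner corners per row and column
Mat corners = new Mat();
boolean found = Calib3d.findChessboardCornersSB(gray, patternSize, corners);
if (found) {
    Scalar stats = Calib3d.estimateChessboardSharpness(gray, patternSize, corners);
    double avgSharpness     = stats.val[0];   // average transition width in pixels
    double avgMinBrightness = stats.val[1];
    double avgMaxBrightness = stats.val[2];
    boolean usableForCalibration = avgSharpness &lt; 3.0;
}
</code>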
  8574. <a name="estimateTranslation3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  8575. <!-- -->
  8576. </a>
  8577. <ul class="blockList">
  8578. <li class="blockList">
  8579. <h4>estimateTranslation3D</h4>
  8580. <pre>public static&nbsp;int&nbsp;estimateTranslation3D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  8581. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  8582. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;out,
  8583. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers)</pre>
  8584. <div class="block">Computes an optimal translation between two 3D point sets.
  8585. It computes
  8586. \(
  8587. \begin{bmatrix}
  8588. x\\
  8589. y\\
  8590. z\\
  8591. \end{bmatrix}
  8592. =
  8593. \begin{bmatrix}
  8594. X\\
  8595. Y\\
  8596. Z\\
  8597. \end{bmatrix}
  8598. +
  8599. \begin{bmatrix}
  8600. b_1\\
  8601. b_2\\
  8602. b_3\\
  8603. \end{bmatrix}
  8604. \)</div>
  8605. <dl>
  8606. <dt><span class="paramLabel">Parameters:</span></dt>
  8607. <dd><code>src</code> - First input 3D point set containing \((X,Y,Z)\).</dd>
  8608. <dd><code>dst</code> - Second input 3D point set containing \((x,y,z)\).</dd>
  8609. <dd><code>out</code> - Output 3D translation vector \(3 \times 1\) of the form
  8610. \(
  8611. \begin{bmatrix}
  8612. b_1 \\
  8613. b_2 \\
  8614. b_3 \\
  8615. \end{bmatrix}
  8616. \)</dd>
  8617. <dd><code>inliers</code> - Output vector indicating which points are inliers (1-inlier, 0-outlier).
  8618. In this overload the RANSAC reprojection threshold and confidence keep their default values; a confidence
  8619. between 0.95 and 0.99 is usually good enough. Values too close to 1 can slow down the estimation
  8620. significantly. Values lower than 0.8-0.9 can result in an incorrectly estimated transformation.
  8621. The function estimates an optimal 3D translation between two 3D point sets using the
  8622. RANSAC algorithm.</dd>
  8623. <dt><span class="returnLabel">Returns:</span></dt>
  8624. <dd>automatically generated</dd>
  8625. </dl>
  8626. </li>
  8627. </ul>
  8628. <a name="estimateTranslation3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-">
  8629. <!-- -->
  8630. </a>
  8631. <ul class="blockList">
  8632. <li class="blockList">
  8633. <h4>estimateTranslation3D</h4>
  8634. <pre>public static&nbsp;int&nbsp;estimateTranslation3D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  8635. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  8636. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;out,
  8637. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  8638. double&nbsp;ransacThreshold)</pre>
  8639. <div class="block">Computes an optimal translation between two 3D point sets.
  8640. It computes
  8641. \(
  8642. \begin{bmatrix}
  8643. x\\
  8644. y\\
  8645. z\\
  8646. \end{bmatrix}
  8647. =
  8648. \begin{bmatrix}
  8649. X\\
  8650. Y\\
  8651. Z\\
  8652. \end{bmatrix}
  8653. +
  8654. \begin{bmatrix}
  8655. b_1\\
  8656. b_2\\
  8657. b_3\\
  8658. \end{bmatrix}
  8659. \)</div>
  8660. <dl>
  8661. <dt><span class="paramLabel">Parameters:</span></dt>
  8662. <dd><code>src</code> - First input 3D point set containing \((X,Y,Z)\).</dd>
  8663. <dd><code>dst</code> - Second input 3D point set containing \((x,y,z)\).</dd>
  8664. <dd><code>out</code> - Output 3D translation vector \(3 \times 1\) of the form
  8665. \(
  8666. \begin{bmatrix}
  8667. b_1 \\
  8668. b_2 \\
  8669. b_3 \\
  8670. \end{bmatrix}
  8671. \)</dd>
  8672. <dd><code>inliers</code> - Output vector indicating which points are inliers (1-inlier, 0-outlier).</dd>
  8673. <dd><code>ransacThreshold</code> - Maximum reprojection error in the RANSAC algorithm to consider a point as
  8674. an inlier. In this overload the confidence keeps its default value; a confidence
  8675. between 0.95 and 0.99 is usually good enough. Values too close to 1 can slow down the estimation
  8676. significantly. Values lower than 0.8-0.9 can result in an incorrectly estimated transformation.
  8677. The function estimates an optimal 3D translation between two 3D point sets using the
  8678. RANSAC algorithm. A usage sketch follows this method entry.</dd>
  8679. <dt><span class="returnLabel">Returns:</span></dt>
  8680. <dd>automatically generated</dd>
  8681. </dl>
  8682. </li>
  8683. </ul>
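A minimal Java sketch of the overload above. <code>srcCloud</code> and <code>dstCloud</code> are hypothetical Mats of matched 3D points (e.g. MatOfPoint3f), and the threshold value is only illustrative:
<code>
// assumes: import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
Mat translation = new Mat();                  // receives the 3x1 vector (b_1, b_2, b_3)^T
Mat inliers = new Mat();                      // 1 = inlier, 0 = outlier, per correspondence
int ok = Calib3d.estimateTranslation3D(srcCloud, dstCloud, translation, inliers, 1.0);
if (ok != 0 &amp;&amp; !translation.empty()) {
    double bx = translation.get(0, 0)[0];
    double by = translation.get(1, 0)[0];
    double bz = translation.get(2, 0)[0];
}
</code>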
  8684. <a name="estimateTranslation3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-double-">
  8685. <!-- -->
  8686. </a>
  8687. <ul class="blockList">
  8688. <li class="blockList">
  8689. <h4>estimateTranslation3D</h4>
  8690. <pre>public static&nbsp;int&nbsp;estimateTranslation3D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  8691. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  8692. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;out,
  8693. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  8694. double&nbsp;ransacThreshold,
  8695. double&nbsp;confidence)</pre>
  8696. <div class="block">Computes an optimal translation between two 3D point sets.
  8697. It computes
  8698. \(
  8699. \begin{bmatrix}
  8700. x\\
  8701. y\\
  8702. z\\
  8703. \end{bmatrix}
  8704. =
  8705. \begin{bmatrix}
  8706. X\\
  8707. Y\\
  8708. Z\\
  8709. \end{bmatrix}
  8710. +
  8711. \begin{bmatrix}
  8712. b_1\\
  8713. b_2\\
  8714. b_3\\
  8715. \end{bmatrix}
  8716. \)</div>
  8717. <dl>
  8718. <dt><span class="paramLabel">Parameters:</span></dt>
  8719. <dd><code>src</code> - First input 3D point set containing \((X,Y,Z)\).</dd>
  8720. <dd><code>dst</code> - Second input 3D point set containing \((x,y,z)\).</dd>
  8721. <dd><code>out</code> - Output 3D translation vector \(3 \times 1\) of the form
  8722. \(
  8723. \begin{bmatrix}
  8724. b_1 \\
  8725. b_2 \\
  8726. b_3 \\
  8727. \end{bmatrix}
  8728. \)</dd>
  8729. <dd><code>inliers</code> - Output vector indicating which points are inliers (1-inlier, 0-outlier).</dd>
  8730. <dd><code>ransacThreshold</code> - Maximum reprojection error in the RANSAC algorithm to consider a point as
  8731. an inlier.</dd>
  8732. <dd><code>confidence</code> - Confidence level, between 0 and 1, for the estimated transformation. Anything
  8733. between 0.95 and 0.99 is usually good enough. Values too close to 1 can slow down the estimation
  8734. significantly. Values lower than 0.8-0.9 can result in an incorrectly estimated transformation.
  8735. The function estimates an optimal 3D translation between two 3D point sets using the
  8736. RANSAC algorithm.</dd>
  8737. <dt><span class="returnLabel">Returns:</span></dt>
  8738. <dd>automatically generated</dd>
  8739. </dl>
  8740. </li>
  8741. </ul>
  8742. <a name="filterHomographyDecompByVisibleRefpoints-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  8743. <!-- -->
  8744. </a>
  8745. <ul class="blockList">
  8746. <li class="blockList">
  8747. <h4>filterHomographyDecompByVisibleRefpoints</h4>
  8748. <pre>public static&nbsp;void&nbsp;filterHomographyDecompByVisibleRefpoints(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rotations,
  8749. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;normals,
  8750. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;beforePoints,
  8751. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;afterPoints,
  8752. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;possibleSolutions)</pre>
  8753. <div class="block">Filters homography decompositions based on additional information.</div>
  8754. <dl>
  8755. <dt><span class="paramLabel">Parameters:</span></dt>
  8756. <dd><code>rotations</code> - Vector of rotation matrices.</dd>
  8757. <dd><code>normals</code> - Vector of plane normal matrices.</dd>
  8758. <dd><code>beforePoints</code> - Vector of (rectified) visible reference points before the homography is applied</dd>
  8759. <dd><code>afterPoints</code> - Vector of (rectified) visible reference points after the homography is applied</dd>
  8760. <dd><code>possibleSolutions</code> - Vector of int indices representing the viable solution set after filtering
  8761. This function is intended to filter the output of the #decomposeHomographyMat function based on additional
  8762. information, as described in CITE: Malis2007 . The summary of the method: the #decomposeHomographyMat function
  8763. returns 2 unique solutions and their "opposites" for a total of 4 solutions. If we have access to the
  8764. sets of points visible in the camera frame before and after the homography transformation is applied,
  8765. we can determine which are the true potential solutions and which are the opposites by verifying which
  8766. homographies are consistent with all visible reference points being in front of the camera. The inputs
  8767. are left unchanged; the filtered solution set is returned as indices into the existing one.</dd>
  8768. </dl>
  8769. </li>
  8770. </ul>
  8771. <a name="filterHomographyDecompByVisibleRefpoints-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  8772. <!-- -->
  8773. </a>
  8774. <ul class="blockList">
  8775. <li class="blockList">
  8776. <h4>filterHomographyDecompByVisibleRefpoints</h4>
  8777. <pre>public static&nbsp;void&nbsp;filterHomographyDecompByVisibleRefpoints(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rotations,
  8778. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;normals,
  8779. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;beforePoints,
  8780. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;afterPoints,
  8781. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;possibleSolutions,
  8782. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;pointsMask)</pre>
  8783. <div class="block">Filters homography decompositions based on additional information.</div>
  8784. <dl>
  8785. <dt><span class="paramLabel">Parameters:</span></dt>
  8786. <dd><code>rotations</code> - Vector of rotation matrices.</dd>
  8787. <dd><code>normals</code> - Vector of plane normal matrices.</dd>
  8788. <dd><code>beforePoints</code> - Vector of (rectified) visible reference points before the homography is applied</dd>
  8789. <dd><code>afterPoints</code> - Vector of (rectified) visible reference points after the homography is applied</dd>
  8790. <dd><code>possibleSolutions</code> - Vector of int indices representing the viable solution set after filtering</dd>
  8791. <dd><code>pointsMask</code> - optional Mat/Vector of 8u type representing the mask for the inliers as given by the #findHomography function
  8792. This function is intended to filter the output of the #decomposeHomographyMat function based on additional
  8793. information, as described in CITE: Malis2007 . The summary of the method: the #decomposeHomographyMat function
  8794. returns 2 unique solutions and their "opposites" for a total of 4 solutions. If we have access to the
  8795. sets of points visible in the camera frame before and after the homography transformation is applied,
  8796. we can determine which are the true potential solutions and which are the opposites by verifying which
  8797. homographies are consistent with all visible reference points being in front of the camera. The inputs
  8798. are left unchanged; the filtered solution set is returned as indices into the existing one. A usage sketch follows this method entry.</dd>
  8799. </dl>
  8800. </li>
  8801. </ul>
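A minimal Java sketch of the intended call chain with #decomposeHomographyMat. <code>H</code>, <code>K</code>, <code>beforePts</code> and <code>afterPts</code> are hypothetical inputs (the homography, the camera intrinsic matrix, and the rectified reference points used to estimate it):
<code>
// assumes: import java.util.ArrayList; import java.util.List;
//          import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
List&lt;Mat&gt; rotations = new ArrayList&lt;&gt;();
List&lt;Mat&gt; translations = new ArrayList&lt;&gt;();
List&lt;Mat&gt; normals = new ArrayList&lt;&gt;();
Calib3d.decomposeHomographyMat(H, K, rotations, translations, normals);

Mat possibleSolutions = new Mat();            // receives indices into rotations/normals
Calib3d.filterHomographyDecompByVisibleRefpoints(rotations, normals,
        beforePts, afterPts, possibleSolutions);

for (int i = 0; i &lt; possibleSolutions.rows(); i++) {
    int idx = (int) possibleSolutions.get(i, 0)[0];
    Mat plausibleRotation = rotations.get(idx);   // a candidate consistent with the visible points
}
</code>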
  8802. <a name="filterSpeckles-org.opencv.core.Mat-double-int-double-">
  8803. <!-- -->
  8804. </a>
  8805. <ul class="blockList">
  8806. <li class="blockList">
  8807. <h4>filterSpeckles</h4>
  8808. <pre>public static&nbsp;void&nbsp;filterSpeckles(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;img,
  8809. double&nbsp;newVal,
  8810. int&nbsp;maxSpeckleSize,
  8811. double&nbsp;maxDiff)</pre>
  8812. <div class="block">Filters off small noise blobs (speckles) in the disparity map</div>
  8813. <dl>
  8814. <dt><span class="paramLabel">Parameters:</span></dt>
  8815. <dd><code>img</code> - The input 16-bit signed disparity image</dd>
  8816. <dd><code>newVal</code> - The disparity value used to paint off the speckles</dd>
  8817. <dd><code>maxSpeckleSize</code> - The maximum speckle size to consider it a speckle. Larger blobs are not
  8818. affected by the algorithm</dd>
  8819. <dd><code>maxDiff</code> - Maximum difference between neighbor disparity pixels to put them into the same
  8820. blob. Note that since StereoBM, StereoSGBM, and possibly other algorithms return a fixed-point
  8821. disparity map, where disparity values are multiplied by 16, this scale factor should be taken into
  8822. account when specifying this parameter value.</dd>
  8823. </dl>
  8824. </li>
  8825. </ul>
  8826. <a name="filterSpeckles-org.opencv.core.Mat-double-int-double-org.opencv.core.Mat-">
  8827. <!-- -->
  8828. </a>
  8829. <ul class="blockList">
  8830. <li class="blockList">
  8831. <h4>filterSpeckles</h4>
  8832. <pre>public static&nbsp;void&nbsp;filterSpeckles(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;img,
  8833. double&nbsp;newVal,
  8834. int&nbsp;maxSpeckleSize,
  8835. double&nbsp;maxDiff,
  8836. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;buf)</pre>
  8837. <div class="block">Filters off small noise blobs (speckles) in the disparity map</div>
  8838. <dl>
  8839. <dt><span class="paramLabel">Parameters:</span></dt>
  8840. <dd><code>img</code> - The input 16-bit signed disparity image</dd>
  8841. <dd><code>newVal</code> - The disparity value used to paint off the speckles</dd>
  8842. <dd><code>maxSpeckleSize</code> - The maximum speckle size to consider it a speckle. Larger blobs are not
  8843. affected by the algorithm</dd>
  8844. <dd><code>maxDiff</code> - Maximum difference between neighbor disparity pixels to put them into the same
  8845. blob. Note that since StereoBM, StereoSGBM, and possibly other algorithms return a fixed-point
  8846. disparity map, where disparity values are multiplied by 16, this scale factor should be taken into
  8847. account when specifying this parameter value.</dd>
  8848. <dd><code>buf</code> - The optional temporary buffer to avoid memory allocation within the function.</dd>
  8849. </dl>
  8850. </li>
  8851. </ul>
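A minimal Java sketch of speckle filtering on a fixed-point disparity map. <code>disparity</code> is a hypothetical CV_16S output of StereoBM or StereoSGBM, and the numeric values are only illustrative:
<code>
// assumes: import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
double newVal = 0;                  // value painted over removed speckles
int maxSpeckleSize = 200;           // blobs up to this many pixels are treated as speckles
double maxDiff = 1.0 * 16;          // 1 disparity level, scaled by the fixed-point factor of 16
Calib3d.filterSpeckles(disparity, newVal, maxSpeckleSize, maxDiff);  // filters in place
</code>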
  8852. <a name="find4QuadCornerSubpix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-">
  8853. <!-- -->
  8854. </a>
  8855. <ul class="blockList">
  8856. <li class="blockList">
  8857. <h4>find4QuadCornerSubpix</h4>
  8858. <pre>public static&nbsp;boolean&nbsp;find4QuadCornerSubpix(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;img,
  8859. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners,
  8860. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;region_size)</pre>
  8861. </li>
  8862. </ul>
  8863. <a name="findChessboardCorners-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.MatOfPoint2f-">
  8864. <!-- -->
  8865. </a>
  8866. <ul class="blockList">
  8867. <li class="blockList">
  8868. <h4>findChessboardCorners</h4>
  8869. <pre>public static&nbsp;boolean&nbsp;findChessboardCorners(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
  8870. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
  8871. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;corners)</pre>
  8872. <div class="block">Finds the positions of internal corners of the chessboard.</div>
  8873. <dl>
  8874. <dt><span class="paramLabel">Parameters:</span></dt>
  8875. <dd><code>image</code> - Source chessboard view. It must be an 8-bit grayscale or color image.</dd>
  8876. <dd><code>patternSize</code> - Number of inner corners per chessboard row and column
  8877. ( patternSize = cv::Size(points_per_row,points_per_colum) = cv::Size(columns,rows) ).</dd>
  8878. <dd><code>corners</code> - Output array of detected corners. In this overload the default operation flags are used; the available flags are:
  8879. <ul>
  8880. <li>
  8881. REF: CALIB_CB_ADAPTIVE_THRESH Use adaptive thresholding to convert the image to black
  8882. and white, rather than a fixed threshold level (computed from the average image brightness).
  8883. </li>
  8884. <li>
  8885. REF: CALIB_CB_NORMALIZE_IMAGE Normalize the image gamma with #equalizeHist before
  8886. applying fixed or adaptive thresholding.
  8887. </li>
  8888. <li>
  8889. REF: CALIB_CB_FILTER_QUADS Use additional criteria (like contour area, perimeter,
  8890. square-like shape) to filter out false quads extracted at the contour retrieval stage.
  8891. </li>
  8892. <li>
  8893. REF: CALIB_CB_FAST_CHECK Run a fast check on the image that looks for chessboard corners,
  8894. and shortcut the call if none is found. This can drastically speed up the call in the
  8895. degenerate condition when no chessboard is observed.
  8896. </li>
  8897. </ul>
  8898. The function attempts to determine whether the input image is a view of the chessboard pattern and
  8899. locate the internal chessboard corners. The function returns a non-zero value if all of the corners
  8900. are found and they are placed in a certain order (row by row, left to right in every row).
  8901. Otherwise, if the function fails to find all the corners or reorder them, it returns 0. For example,
  8902. a regular chessboard has 8 x 8 squares and 7 x 7 internal corners, that is, points where the black
  8903. squares touch each other. The detected coordinates are approximate, and to determine their positions
  8904. more accurately, the function calls #cornerSubPix. You may also call #cornerSubPix yourself with
  8905. different parameters if the returned coordinates are not accurate enough.
  8906. Sample usage of detecting and drawing chessboard corners: :
  8907. <code>
  8908. Size patternsize(8,6); //interior number of corners
  8909. Mat gray = ....; //source image
  8910. vector&lt;Point2f&gt; corners; //this will be filled by the detected corners
  8911. //CALIB_CB_FAST_CHECK saves a lot of time on images
  8912. //that do not contain any chessboard corners
  8913. bool patternfound = findChessboardCorners(gray, patternsize, corners,
  8914. CALIB_CB_ADAPTIVE_THRESH + CALIB_CB_NORMALIZE_IMAGE
  8915. + CALIB_CB_FAST_CHECK);
  8916. if(patternfound)
  8917. cornerSubPix(gray, corners, Size(11, 11), Size(-1, -1),
  8918. TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1));
  8919. drawChessboardCorners(img, patternsize, Mat(corners), patternfound);
  8920. </code>
  8921. <b>Note:</b> The function requires white space (like a square-thick border, the wider the better) around
  8922. the board to make the detection more robust in various environments. Otherwise, if there is no
  8923. border and the background is dark, the outer black squares cannot be segmented properly and so the
  8924. square grouping and ordering algorithm fails.
  8925. Use gen_pattern.py (REF: tutorial_camera_calibration_pattern) to create a checkerboard.</dd>
  8926. <dt><span class="returnLabel">Returns:</span></dt>
  8927. <dd>automatically generated</dd>
  8928. </dl>
  8929. </li>
  8930. </ul>
  8931. <a name="findChessboardCorners-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.MatOfPoint2f-int-">
  8932. <!-- -->
  8933. </a>
  8934. <ul class="blockList">
  8935. <li class="blockList">
  8936. <h4>findChessboardCorners</h4>
  8937. <pre>public static&nbsp;boolean&nbsp;findChessboardCorners(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
  8938. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
  8939. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;corners,
  8940. int&nbsp;flags)</pre>
  8941. <div class="block">Finds the positions of internal corners of the chessboard.</div>
  8942. <dl>
  8943. <dt><span class="paramLabel">Parameters:</span></dt>
  8944. <dd><code>image</code> - Source chessboard view. It must be an 8-bit grayscale or color image.</dd>
  8945. <dd><code>patternSize</code> - Number of inner corners per chessboard row and column
  8946. ( patternSize = cv::Size(points_per_row,points_per_colum) = cv::Size(columns,rows) ).</dd>
  8947. <dd><code>corners</code> - Output array of detected corners.</dd>
  8948. <dd><code>flags</code> - Various operation flags that can be zero or a combination of the following values:
  8949. <ul>
  8950. <li>
  8951. REF: CALIB_CB_ADAPTIVE_THRESH Use adaptive thresholding to convert the image to black
  8952. and white, rather than a fixed threshold level (computed from the average image brightness).
  8953. </li>
  8954. <li>
  8955. REF: CALIB_CB_NORMALIZE_IMAGE Normalize the image gamma with #equalizeHist before
  8956. applying fixed or adaptive thresholding.
  8957. </li>
  8958. <li>
  8959. REF: CALIB_CB_FILTER_QUADS Use additional criteria (like contour area, perimeter,
  8960. square-like shape) to filter out false quads extracted at the contour retrieval stage.
  8961. </li>
  8962. <li>
  8963. REF: CALIB_CB_FAST_CHECK Run a fast check on the image that looks for chessboard corners,
  8964. and shortcut the call if none is found. This can drastically speed up the call in the
  8965. degenerate condition when no chessboard is observed.
  8966. </li>
  8967. </ul>
  8968. The function attempts to determine whether the input image is a view of the chessboard pattern and
  8969. locate the internal chessboard corners. The function returns a non-zero value if all of the corners
  8970. are found and they are placed in a certain order (row by row, left to right in every row).
  8971. Otherwise, if the function fails to find all the corners or reorder them, it returns 0. For example,
  8972. a regular chessboard has 8 x 8 squares and 7 x 7 internal corners, that is, points where the black
  8973. squares touch each other. The detected coordinates are approximate, and to determine their positions
  8974. more accurately, the function calls #cornerSubPix. You may also call #cornerSubPix yourself with
  8975. different parameters if the returned coordinates are not accurate enough.
  8976. Sample usage of detecting and drawing chessboard corners: :
  8977. <code>
  8978. Size patternsize(8,6); //interior number of corners
  8979. Mat gray = ....; //source image
  8980. vector&lt;Point2f&gt; corners; //this will be filled by the detected corners
  8981. //CALIB_CB_FAST_CHECK saves a lot of time on images
  8982. //that do not contain any chessboard corners
  8983. bool patternfound = findChessboardCorners(gray, patternsize, corners,
  8984. CALIB_CB_ADAPTIVE_THRESH + CALIB_CB_NORMALIZE_IMAGE
  8985. + CALIB_CB_FAST_CHECK);
  8986. if(patternfound)
  8987. cornerSubPix(gray, corners, Size(11, 11), Size(-1, -1),
  8988. TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1));
  8989. drawChessboardCorners(img, patternsize, Mat(corners), patternfound);
  8990. </code>
  8991. <b>Note:</b> The function requires white space (like a square-thick border, the wider the better) around
  8992. the board to make the detection more robust in various environments. Otherwise, if there is no
  8993. border and the background is dark, the outer black squares cannot be segmented properly and so the
  8994. square grouping and ordering algorithm fails.
  8995. Use gen_pattern.py (REF: tutorial_camera_calibration_pattern) to create a checkerboard. A Java usage sketch follows this method entry.</dd>
  8996. <dt><span class="returnLabel">Returns:</span></dt>
  8997. <dd>automatically generated</dd>
  8998. </dl>
  8999. </li>
  9000. </ul>
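Since this page documents the Java binding, a rough Java counterpart of the C++ snippet above might look as follows (a sketch only; <code>gray</code> and <code>img</code> are hypothetical source and drawing images, and the imgproc module is assumed to be available for #cornerSubPix):
<code>
// assumes: import org.opencv.core.*; import org.opencv.calib3d.Calib3d; import org.opencv.imgproc.Imgproc;
Size patternSize = new Size(8, 6);            // interior number of corners
MatOfPoint2f corners = new MatOfPoint2f();    // filled with the detected corners
// CALIB_CB_FAST_CHECK saves a lot of time on images without any chessboard
boolean patternFound = Calib3d.findChessboardCorners(gray, patternSize, corners,
        Calib3d.CALIB_CB_ADAPTIVE_THRESH + Calib3d.CALIB_CB_NORMALIZE_IMAGE
        + Calib3d.CALIB_CB_FAST_CHECK);
if (patternFound) {
    Imgproc.cornerSubPix(gray, corners, new Size(11, 11), new Size(-1, -1),
            new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
}
Calib3d.drawChessboardCorners(img, patternSize, corners, patternFound);
</code>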
  9001. <a name="findChessboardCornersSB-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-">
  9002. <!-- -->
  9003. </a>
  9004. <ul class="blockList">
  9005. <li class="blockList">
  9006. <h4>findChessboardCornersSB</h4>
  9007. <pre>public static&nbsp;boolean&nbsp;findChessboardCornersSB(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
  9008. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
  9009. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners)</pre>
  9010. </li>
  9011. </ul>
  9012. <a name="findChessboardCornersSB-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-int-">
  9013. <!-- -->
  9014. </a>
  9015. <ul class="blockList">
  9016. <li class="blockList">
  9017. <h4>findChessboardCornersSB</h4>
  9018. <pre>public static&nbsp;boolean&nbsp;findChessboardCornersSB(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
  9019. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
  9020. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners,
  9021. int&nbsp;flags)</pre>
  9022. </li>
  9023. </ul>
  9024. <a name="findChessboardCornersSBWithMeta-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-int-org.opencv.core.Mat-">
  9025. <!-- -->
  9026. </a>
  9027. <ul class="blockList">
  9028. <li class="blockList">
  9029. <h4>findChessboardCornersSBWithMeta</h4>
  9030. <pre>public static&nbsp;boolean&nbsp;findChessboardCornersSBWithMeta(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
  9031. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
  9032. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;corners,
  9033. int&nbsp;flags,
  9034. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;meta)</pre>
  9035. <div class="block">Finds the positions of internal corners of the chessboard using a sector based approach.</div>
  9036. <dl>
  9037. <dt><span class="paramLabel">Parameters:</span></dt>
  9038. <dd><code>image</code> - Source chessboard view. It must be an 8-bit grayscale or color image.</dd>
  9039. <dd><code>patternSize</code> - Number of inner corners per chessboard row and column
  9040. ( patternSize = cv::Size(points_per_row,points_per_colum) = cv::Size(columns,rows) ).</dd>
  9041. <dd><code>corners</code> - Output array of detected corners.</dd>
  9042. <dd><code>flags</code> - Various operation flags that can be zero or a combination of the following values:
  9043. <ul>
  9044. <li>
  9045. REF: CALIB_CB_NORMALIZE_IMAGE Normalize the image gamma with equalizeHist before detection.
  9046. </li>
  9047. <li>
  9048. REF: CALIB_CB_EXHAUSTIVE Run an exhaustive search to improve detection rate.
  9049. </li>
  9050. <li>
  9051. REF: CALIB_CB_ACCURACY Upsample the input image to improve sub-pixel accuracy due to aliasing effects.
  9052. </li>
  9053. <li>
  9054. REF: CALIB_CB_LARGER The detected pattern is allowed to be larger than patternSize (see description).
  9055. </li>
  9056. <li>
  9057. REF: CALIB_CB_MARKER The detected pattern must have a marker (see description).
  9058. This should be used if an accurate camera calibration is required.
  9059. </li>
  9060. </ul></dd>
  9061. <dd><code>meta</code> - Optional output array of detected corners (CV_8UC1 and size = cv::Size(columns,rows)).
  9062. Each entry stands for one corner of the pattern and can have one of the following values:
  9063. <ul>
  9064. <li>
  9065. 0 = no meta data attached
  9066. </li>
  9067. <li>
  9068. 1 = left-top corner of a black cell
  9069. </li>
  9070. <li>
  9071. 2 = left-top corner of a white cell
  9072. </li>
  9073. <li>
  9074. 3 = left-top corner of a black cell with a white marker dot
  9075. </li>
  9076. <li>
  9077. 4 = left-top corner of a white cell with a black marker dot (pattern origin in case of markers otherwise first corner)
  9078. </li>
  9079. </ul>
  9080. The function is analogous to #findChessboardCorners but uses a localized Radon
  9081. transformation approximated by box filters, which makes it more robust to all sorts of
  9082. noise and faster on larger images, and it is able to directly return the sub-pixel
  9083. positions of the internal chessboard corners. The method is based on the paper
  9084. CITE: duda2018 "Accurate Detection and Localization of Checkerboard Corners for
  9085. Calibration", demonstrating that the returned sub-pixel positions are more
  9086. accurate than the ones returned by cornerSubPix, allowing a precise camera
  9087. calibration for demanding applications.
  9088. In case the flags REF: CALIB_CB_LARGER or REF: CALIB_CB_MARKER are given,
  9089. the result can be recovered from the optional meta array. Both flags are
  9090. helpful when using calibration patterns exceeding the field of view of the camera.
  9091. These oversized patterns allow more accurate calibrations, as corners that are
  9092. as close as possible to the image borders can be utilized. For a
  9093. consistent coordinate system across all images, the optional marker (see image
  9094. below) can be used to move the origin of the board to the location where the
  9095. black circle is located.
  9096. <b>Note:</b> The function requires a white border with roughly the same width as one
  9097. of the checkerboard fields around the whole board to improve the detection in
  9098. various environments. In addition, because of the localized Radon
  9099. transformation it is beneficial to use round corners for the field corners
  9100. which are located on the outside of the board. The following figure illustrates
  9101. a sample checkerboard optimized for the detection. However, any other checkerboard
  9102. can be used as well.
  9103. Use gen_pattern.py (REF: tutorial_camera_calibration_pattern) to create a checkerboard.
  9104. ![Checkerboard](pics/checkerboard_radon.png) A usage sketch follows this method entry.</dd>
  9105. <dt><span class="returnLabel">Returns:</span></dt>
  9106. <dd>automatically generated</dd>
  9107. </dl>
  9108. </li>
  9109. </ul>
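A minimal Java sketch of the sector based detector with meta output. <code>gray</code> is a hypothetical 8-bit grayscale view of a marker checkerboard, and the flag combination is only one plausible choice:
<code>
// assumes: import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
Size patternSize = new Size(9, 6);
Mat corners = new Mat();
Mat meta = new Mat();               // CV_8UC1, one entry per corner (values 0..4, see above)
boolean found = Calib3d.findChessboardCornersSBWithMeta(gray, patternSize, corners,
        Calib3d.CALIB_CB_MARKER + Calib3d.CALIB_CB_ACCURACY, meta);
if (found) {
    // corners already holds sub-pixel positions; no separate cornerSubPix pass is needed
}
</code>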
  9110. <a name="findCirclesGrid-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-">
  9111. <!-- -->
  9112. </a>
  9113. <ul class="blockList">
  9114. <li class="blockList">
  9115. <h4>findCirclesGrid</h4>
  9116. <pre>public static&nbsp;boolean&nbsp;findCirclesGrid(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
  9117. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
  9118. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;centers)</pre>
  9119. </li>
  9120. </ul>
  9121. <a name="findCirclesGrid-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-int-">
  9122. <!-- -->
  9123. </a>
  9124. <ul class="blockList">
  9125. <li class="blockList">
  9126. <h4>findCirclesGrid</h4>
  9127. <pre>public static&nbsp;boolean&nbsp;findCirclesGrid(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;image,
  9128. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;patternSize,
  9129. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;centers,
  9130. int&nbsp;flags)</pre>
  9131. </li>
  9132. </ul>
  9133. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-">
  9134. <!-- -->
  9135. </a>
  9136. <ul class="blockList">
  9137. <li class="blockList">
  9138. <h4>findEssentialMat</h4>
  9139. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9140. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2)</pre>
  9141. <dl>
  9142. <dt><span class="paramLabel">Parameters:</span></dt>
  9143. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9144. be floating-point (single or double precision).</dd>
  9145. <dd><code>points2</code> - Array of the second image points of the same size and format as points1.
  9146. This overload assumes that points1 and points2 are feature points from cameras with the same focal length and principal point. The robust estimation method and its parameters keep their default values; the possible methods (exposed by the fuller overloads) are:
  9147. <ul>
  9148. <li>
  9149. REF: RANSAC for the RANSAC algorithm.
  9150. </li>
  9151. <li>
  9152. REF: LMEDS for the LMedS algorithm.
  9153. </li>
  9154. </ul>
  9155. The distance threshold (the maximum distance from a point to an epipolar
  9156. line in pixels, beyond which the point is considered an outlier and is not used for computing the
  9157. final fundamental matrix; typically something like 1-3, depending on the accuracy of the
  9158. point localization, image resolution, and the image noise) and the confidence (probability) that the
  9159. estimated matrix is correct are likewise left at their defaults, and no inlier mask is returned.
  9160. This function differs from the one above in that it computes the camera intrinsic matrix from the focal length and
  9161. principal point:
  9162. \(A =
  9163. \begin{bmatrix}
  9164. f &amp; 0 &amp; x_{pp} \\
  9165. 0 &amp; f &amp; y_{pp} \\
  9166. 0 &amp; 0 &amp; 1
  9167. \end{bmatrix}\)</dd>
  9168. <dt><span class="returnLabel">Returns:</span></dt>
  9169. <dd>automatically generated</dd>
  9170. </dl>
  9171. </li>
  9172. </ul>
  9173. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-double-">
  9174. <!-- -->
  9175. </a>
  9176. <ul class="blockList">
  9177. <li class="blockList">
  9178. <h4>findEssentialMat</h4>
  9179. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9180. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9181. double&nbsp;focal)</pre>
  9182. <dl>
  9183. <dt><span class="paramLabel">Parameters:</span></dt>
  9184. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9185. be floating-point (single or double precision).</dd>
  9186. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  9187. <dd><code>focal</code> - focal length of the camera. Note that this function assumes that points1 and points2
  9188. are feature points from cameras with the same focal length and principal point.
  9189. <ul>
  9190. <li>
  9191. REF: RANSAC for the RANSAC algorithm.
  9192. </li>
  9193. <li>
  9194. REF: LMEDS for the LMedS algorithm.
  9195. </li>
  9196. </ul>
  9197. line in pixels, beyond which the point is considered an outlier and is not used for computing the
  9198. final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
  9199. point localization, image resolution, and the image noise.
  9200. confidence (probability) that the estimated matrix is correct.
  9201. for the other points. The array is computed only in the RANSAC and LMedS methods.
  9202. This function differs from the one above in that it computes the camera intrinsic matrix from the focal length and
  9203. principal point:
  9204. \(A =
  9205. \begin{bmatrix}
  9206. f &amp; 0 &amp; x_{pp} \\
  9207. 0 &amp; f &amp; y_{pp} \\
  9208. 0 &amp; 0 &amp; 1
  9209. \end{bmatrix}\)</dd>
  9210. <dt><span class="returnLabel">Returns:</span></dt>
  9211. <dd>automatically generated</dd>
  9212. </dl>
  9213. </li>
  9214. </ul>
  9215. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-">
  9216. <!-- -->
  9217. </a>
  9218. <ul class="blockList">
  9219. <li class="blockList">
  9220. <h4>findEssentialMat</h4>
  9221. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9222. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9223. double&nbsp;focal,
  9224. <a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp)</pre>
  9225. <dl>
  9226. <dt><span class="paramLabel">Parameters:</span></dt>
  9227. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9228. be floating-point (single or double precision).</dd>
  9229. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  9230. <dd><code>focal</code> - focal length of the camera. Note that this function assumes that points1 and points2
  9231. are feature points from cameras with the same focal length and principal point.</dd>
  9232. <dd><code>pp</code> - principal point of the camera.
  9233. <ul>
  9234. <li>
  9235. REF: RANSAC for the RANSAC algorithm.
  9236. </li>
  9237. <li>
  9238. REF: LMEDS for the LMedS algorithm.
  9239. </li>
  9240. </ul>
  9241. line in pixels, beyond which the point is considered an outlier and is not used for computing the
  9242. final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
  9243. point localization, image resolution, and the image noise.
  9244. confidence (probability) that the estimated matrix is correct.
  9245. for the other points. The array is computed only in the RANSAC and LMedS methods.
  9246. This function differs from the one above in that it computes the camera intrinsic matrix from the focal length and
  9247. principal point:
  9248. \(A =
  9249. \begin{bmatrix}
  9250. f &amp; 0 &amp; x_{pp} \\
  9251. 0 &amp; f &amp; y_{pp} \\
  9252. 0 &amp; 0 &amp; 1
  9253. \end{bmatrix}\)</dd>
  9254. <dt><span class="returnLabel">Returns:</span></dt>
  9255. <dd>automatically generated</dd>
  9256. </dl>
  9257. </li>
  9258. </ul>
  9259. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-int-">
  9260. <!-- -->
  9261. </a>
  9262. <ul class="blockList">
  9263. <li class="blockList">
  9264. <h4>findEssentialMat</h4>
  9265. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9266. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9267. double&nbsp;focal,
  9268. <a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp,
  9269. int&nbsp;method)</pre>
  9270. <dl>
  9271. <dt><span class="paramLabel">Parameters:</span></dt>
  9272. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9273. be floating-point (single or double precision).</dd>
  9274. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  9275. <dd><code>focal</code> - focal length of the camera. Note that this function assumes that points1 and points2
  9276. are feature points from cameras with same focal length and principal point.</dd>
  9277. <dd><code>pp</code> - principal point of the camera.</dd>
<dd><code>method</code> - Method for computing an essential matrix.
  9279. <ul>
  9280. <li>
  9281. REF: RANSAC for the RANSAC algorithm.
  9282. </li>
  9283. <li>
  9284. REF: LMEDS for the LMedS algorithm.
  9285. </li>
  9286. </ul>
The threshold on the distance to the epipolar line, the confidence (probability) that the estimated
matrix is correct, and the output inlier mask are not exposed by this overload and keep their
default values.
This function differs from the one above in that it computes the camera intrinsic matrix from the
focal length and the principal point:
  9294. \(A =
  9295. \begin{bmatrix}
  9296. f &amp; 0 &amp; x_{pp} \\
  9297. 0 &amp; f &amp; y_{pp} \\
  9298. 0 &amp; 0 &amp; 1
  9299. \end{bmatrix}\)</dd>
  9300. <dt><span class="returnLabel">Returns:</span></dt>
  9301. <dd>automatically generated</dd>
  9302. </dl>
  9303. </li>
  9304. </ul>
  9305. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-int-double-">
  9306. <!-- -->
  9307. </a>
  9308. <ul class="blockList">
  9309. <li class="blockList">
  9310. <h4>findEssentialMat</h4>
  9311. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9312. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9313. double&nbsp;focal,
  9314. <a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp,
  9315. int&nbsp;method,
  9316. double&nbsp;prob)</pre>
  9317. <dl>
  9318. <dt><span class="paramLabel">Parameters:</span></dt>
  9319. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9320. be floating-point (single or double precision).</dd>
  9321. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
<dd><code>focal</code> - focal length of the camera. Note that this function assumes that points1 and points2
are feature points from cameras with the same focal length and principal point.</dd>
  9324. <dd><code>pp</code> - principal point of the camera.</dd>
<dd><code>method</code> - Method for computing an essential matrix.
  9326. <ul>
  9327. <li>
  9328. REF: RANSAC for the RANSAC algorithm.
  9329. </li>
  9330. <li>
  9331. REF: LMEDS for the LMedS algorithm.
  9332. </li>
  9333. </ul>
The threshold on the distance to the epipolar line is not exposed by this overload and keeps its
default value.</dd>
  9337. <dd><code>prob</code> - Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
  9338. confidence (probability) that the estimated matrix is correct.
The output inlier mask is not exposed by this overload.
This function differs from the one above in that it computes the camera intrinsic matrix from the
focal length and the principal point:
  9342. \(A =
  9343. \begin{bmatrix}
  9344. f &amp; 0 &amp; x_{pp} \\
  9345. 0 &amp; f &amp; y_{pp} \\
  9346. 0 &amp; 0 &amp; 1
  9347. \end{bmatrix}\)</dd>
  9348. <dt><span class="returnLabel">Returns:</span></dt>
  9349. <dd>automatically generated</dd>
  9350. </dl>
  9351. </li>
  9352. </ul>
  9353. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-int-double-double-">
  9354. <!-- -->
  9355. </a>
  9356. <ul class="blockList">
  9357. <li class="blockList">
  9358. <h4>findEssentialMat</h4>
  9359. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9360. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9361. double&nbsp;focal,
  9362. <a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp,
  9363. int&nbsp;method,
  9364. double&nbsp;prob,
  9365. double&nbsp;threshold)</pre>
  9366. <dl>
  9367. <dt><span class="paramLabel">Parameters:</span></dt>
  9368. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9369. be floating-point (single or double precision).</dd>
  9370. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
<dd><code>focal</code> - focal length of the camera. Note that this function assumes that points1 and points2
are feature points from cameras with the same focal length and principal point.</dd>
  9373. <dd><code>pp</code> - principal point of the camera.</dd>
<dd><code>method</code> - Method for computing an essential matrix.
  9375. <ul>
  9376. <li>
  9377. REF: RANSAC for the RANSAC algorithm.
  9378. </li>
  9379. <li>
  9380. REF: LMEDS for the LMedS algorithm.
  9381. </li>
  9382. </ul></dd>
  9383. <dd><code>threshold</code> - Parameter used for RANSAC. It is the maximum distance from a point to an epipolar
  9384. line in pixels, beyond which the point is considered an outlier and is not used for computing the
  9385. final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
  9386. point localization, image resolution, and the image noise.</dd>
  9387. <dd><code>prob</code> - Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
  9388. confidence (probability) that the estimated matrix is correct.
The output inlier mask is not exposed by this overload.
This function differs from the one above in that it computes the camera intrinsic matrix from the
focal length and the principal point:
  9392. \(A =
  9393. \begin{bmatrix}
  9394. f &amp; 0 &amp; x_{pp} \\
  9395. 0 &amp; f &amp; y_{pp} \\
  9396. 0 &amp; 0 &amp; 1
  9397. \end{bmatrix}\)</dd>
  9398. <dt><span class="returnLabel">Returns:</span></dt>
  9399. <dd>automatically generated</dd>
  9400. </dl>
  9401. </li>
  9402. </ul>
  9403. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-int-double-double-int-">
  9404. <!-- -->
  9405. </a>
  9406. <ul class="blockList">
  9407. <li class="blockList">
  9408. <h4>findEssentialMat</h4>
  9409. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9410. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9411. double&nbsp;focal,
  9412. <a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp,
  9413. int&nbsp;method,
  9414. double&nbsp;prob,
  9415. double&nbsp;threshold,
  9416. int&nbsp;maxIters)</pre>
  9417. <dl>
  9418. <dt><span class="paramLabel">Parameters:</span></dt>
  9419. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9420. be floating-point (single or double precision).</dd>
  9421. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
<dd><code>focal</code> - focal length of the camera. Note that this function assumes that points1 and points2
are feature points from cameras with the same focal length and principal point.</dd>
  9424. <dd><code>pp</code> - principal point of the camera.</dd>
<dd><code>method</code> - Method for computing an essential matrix.
  9426. <ul>
  9427. <li>
  9428. REF: RANSAC for the RANSAC algorithm.
  9429. </li>
  9430. <li>
  9431. REF: LMEDS for the LMedS algorithm.
  9432. </li>
  9433. </ul></dd>
  9434. <dd><code>threshold</code> - Parameter used for RANSAC. It is the maximum distance from a point to an epipolar
  9435. line in pixels, beyond which the point is considered an outlier and is not used for computing the
  9436. final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
  9437. point localization, image resolution, and the image noise.</dd>
  9438. <dd><code>prob</code> - Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
  9439. confidence (probability) that the estimated matrix is correct.
The output inlier mask is not exposed by this overload.</dd>
  9441. <dd><code>maxIters</code> - The maximum number of robust method iterations.
This function differs from the one above in that it computes the camera intrinsic matrix from the
focal length and the principal point:
  9444. \(A =
  9445. \begin{bmatrix}
  9446. f &amp; 0 &amp; x_{pp} \\
  9447. 0 &amp; f &amp; y_{pp} \\
  9448. 0 &amp; 0 &amp; 1
  9449. \end{bmatrix}\)</dd>
  9450. <dt><span class="returnLabel">Returns:</span></dt>
  9451. <dd>automatically generated</dd>
  9452. </dl>
  9453. </li>
  9454. </ul>
  9455. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-int-double-double-int-org.opencv.core.Mat-">
  9456. <!-- -->
  9457. </a>
  9458. <ul class="blockList">
  9459. <li class="blockList">
  9460. <h4>findEssentialMat</h4>
  9461. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9462. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9463. double&nbsp;focal,
  9464. <a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp,
  9465. int&nbsp;method,
  9466. double&nbsp;prob,
  9467. double&nbsp;threshold,
  9468. int&nbsp;maxIters,
  9469. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</pre>
  9470. <dl>
  9471. <dt><span class="paramLabel">Parameters:</span></dt>
  9472. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9473. be floating-point (single or double precision).</dd>
  9474. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
<dd><code>focal</code> - focal length of the camera. Note that this function assumes that points1 and points2
are feature points from cameras with the same focal length and principal point.</dd>
  9477. <dd><code>pp</code> - principal point of the camera.</dd>
<dd><code>method</code> - Method for computing an essential matrix.
  9479. <ul>
  9480. <li>
  9481. REF: RANSAC for the RANSAC algorithm.
  9482. </li>
  9483. <li>
  9484. REF: LMEDS for the LMedS algorithm.
  9485. </li>
  9486. </ul></dd>
  9487. <dd><code>threshold</code> - Parameter used for RANSAC. It is the maximum distance from a point to an epipolar
  9488. line in pixels, beyond which the point is considered an outlier and is not used for computing the
  9489. final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
  9490. point localization, image resolution, and the image noise.</dd>
  9491. <dd><code>prob</code> - Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
  9492. confidence (probability) that the estimated matrix is correct.</dd>
  9493. <dd><code>mask</code> - Output array of N elements, every element of which is set to 0 for outliers and to 1
  9494. for the other points. The array is computed only in the RANSAC and LMedS methods.</dd>
  9495. <dd><code>maxIters</code> - The maximum number of robust method iterations.
This function differs from the one above in that it computes the camera intrinsic matrix from the
focal length and the principal point:
  9498. \(A =
  9499. \begin{bmatrix}
  9500. f &amp; 0 &amp; x_{pp} \\
  9501. 0 &amp; f &amp; y_{pp} \\
  9502. 0 &amp; 0 &amp; 1
  9503. \end{bmatrix}\)</dd>
  9504. <dt><span class="returnLabel">Returns:</span></dt>
  9505. <dd>automatically generated</dd>
  9506. </dl>
  9507. </li>
  9508. </ul>
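<div class="block">A sketch of the fully parameterised overload above, running RANSAC and reading back the
inlier mask; the matched points, focal length, and principal point are hypothetical placeholders.</div>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;

public class FindEssentialRansac {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Hypothetical matches; real code would take them from a descriptor matcher.
        MatOfPoint2f pts1 = new MatOfPoint2f(
                new Point(100, 100), new Point(210, 120), new Point(150, 210),
                new Point(300, 250), new Point(60, 300), new Point(250, 80));
        MatOfPoint2f pts2 = new MatOfPoint2f(
                new Point(104, 98), new Point(215, 118), new Point(153, 207),
                new Point(305, 246), new Point(66, 297), new Point(256, 79));

        Mat mask = new Mat(); // filled with 0 (outlier) / 1 (inlier) per point
        Mat E = Calib3d.findEssentialMat(pts1, pts2,
                800.0, new Point(320, 240), // focal, pp
                Calib3d.RANSAC,             // method
                0.999,                      // prob: desired confidence in the result
                1.0,                        // threshold: max distance to the epipolar line, in pixels
                1000,                       // maxIters
                mask);
        System.out.println("inliers: " + Core.countNonZero(mask) + " of " + pts1.rows());
    }
}
</pre>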
  9509. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  9510. <!-- -->
  9511. </a>
  9512. <ul class="blockList">
  9513. <li class="blockList">
  9514. <h4>findEssentialMat</h4>
  9515. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9516. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9517. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix)</pre>
  9518. <div class="block">Calculates an essential matrix from the corresponding points in two images.</div>
  9519. <dl>
  9520. <dt><span class="paramLabel">Parameters:</span></dt>
  9521. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9522. be floating-point (single or double precision).</dd>
  9523. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  9524. <dd><code>cameraMatrix</code> - Camera intrinsic matrix \(\cameramatrix{A}\) .
  9525. Note that this function assumes that points1 and points2 are feature points from cameras with the
  9526. same camera intrinsic matrix. If this assumption does not hold for your use case, use
  9527. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  9528. to normalized image coordinates, which are valid for the identity camera intrinsic matrix. When
  9529. passing these coordinates, pass the identity matrix for this parameter.
The robust estimation method (REF: RANSAC for the RANSAC algorithm or REF: LMEDS for the LMedS
algorithm), the confidence (probability) that the estimated matrix is correct, the threshold on the
distance to the epipolar line, and the output inlier mask are not exposed by this overload and keep
their default values.
This function estimates the essential matrix based on the five-point algorithm solver in CITE: Nister03 .
CITE: SteweniusCFS is also related. The epipolar geometry is described by the following equation:
  9545. \([p_2; 1]^T K^{-T} E K^{-1} [p_1; 1] = 0\)
  9546. where \(E\) is an essential matrix, \(p_1\) and \(p_2\) are corresponding points in the first and the
  9547. second images, respectively. The result of this function may be passed further to
  9548. #decomposeEssentialMat or #recoverPose to recover the relative pose between cameras.</dd>
  9549. <dt><span class="returnLabel">Returns:</span></dt>
  9550. <dd>automatically generated</dd>
  9551. </dl>
  9552. </li>
  9553. </ul>
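<div class="block">A sketch of the single-camera-matrix overload above, followed by the relative-pose
recovery mentioned at the end of its description (via recoverPose); the intrinsic matrix and the
matched points are hypothetical placeholders.</div>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;

public class EssentialToPose {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Hypothetical camera intrinsic matrix A = [[fx 0 cx], [0 fy cy], [0 0 1]].
        Mat K = new Mat(3, 3, CvType.CV_64F);
        K.put(0, 0,
                800.0,   0.0, 320.0,
                  0.0, 800.0, 240.0,
                  0.0,   0.0,   1.0);

        MatOfPoint2f pts1 = new MatOfPoint2f(
                new Point(100, 100), new Point(210, 120), new Point(150, 210),
                new Point(300, 250), new Point(60, 300), new Point(250, 80));
        MatOfPoint2f pts2 = new MatOfPoint2f(
                new Point(104, 98), new Point(215, 118), new Point(153, 207),
                new Point(305, 246), new Point(66, 297), new Point(256, 79));

        Mat E = Calib3d.findEssentialMat(pts1, pts2, K);

        // Recover the relative rotation and the translation direction from E.
        Mat R = new Mat();
        Mat t = new Mat();
        int inliers = Calib3d.recoverPose(E, pts1, pts2, K, R, t);
        System.out.println("cheirality inliers: " + inliers
                + "\nR =\n" + R.dump() + "\nt =\n" + t.dump());
    }
}
</pre>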
  9554. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">
  9555. <!-- -->
  9556. </a>
  9557. <ul class="blockList">
  9558. <li class="blockList">
  9559. <h4>findEssentialMat</h4>
  9560. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9561. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9562. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  9563. int&nbsp;method)</pre>
  9564. <div class="block">Calculates an essential matrix from the corresponding points in two images.</div>
  9565. <dl>
  9566. <dt><span class="paramLabel">Parameters:</span></dt>
  9567. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9568. be floating-point (single or double precision).</dd>
  9569. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  9570. <dd><code>cameraMatrix</code> - Camera intrinsic matrix \(\cameramatrix{A}\) .
  9571. Note that this function assumes that points1 and points2 are feature points from cameras with the
  9572. same camera intrinsic matrix. If this assumption does not hold for your use case, use
  9573. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  9574. to normalized image coordinates, which are valid for the identity camera intrinsic matrix. When
  9575. passing these coordinates, pass the identity matrix for this parameter.</dd>
  9576. <dd><code>method</code> - Method for computing an essential matrix.
  9577. <ul>
  9578. <li>
  9579. REF: RANSAC for the RANSAC algorithm.
  9580. </li>
  9581. <li>
  9582. REF: LMEDS for the LMedS algorithm.
  9583. </li>
  9584. </ul>
The confidence (probability) that the estimated matrix is correct, the threshold on the distance to
the epipolar line, and the output inlier mask are not exposed by this overload and keep their
default values.
This function estimates the essential matrix based on the five-point algorithm solver in CITE: Nister03 .
CITE: SteweniusCFS is also related. The epipolar geometry is described by the following equation:
  9592. \([p_2; 1]^T K^{-T} E K^{-1} [p_1; 1] = 0\)
  9593. where \(E\) is an essential matrix, \(p_1\) and \(p_2\) are corresponding points in the first and the
  9594. second images, respectively. The result of this function may be passed further to
  9595. #decomposeEssentialMat or #recoverPose to recover the relative pose between cameras.</dd>
  9596. <dt><span class="returnLabel">Returns:</span></dt>
  9597. <dd>automatically generated</dd>
  9598. </dl>
  9599. </li>
  9600. </ul>
  9601. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-">
  9602. <!-- -->
  9603. </a>
  9604. <ul class="blockList">
  9605. <li class="blockList">
  9606. <h4>findEssentialMat</h4>
  9607. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9608. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9609. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  9610. int&nbsp;method,
  9611. double&nbsp;prob)</pre>
  9612. <div class="block">Calculates an essential matrix from the corresponding points in two images.</div>
  9613. <dl>
  9614. <dt><span class="paramLabel">Parameters:</span></dt>
  9615. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9616. be floating-point (single or double precision).</dd>
  9617. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  9618. <dd><code>cameraMatrix</code> - Camera intrinsic matrix \(\cameramatrix{A}\) .
  9619. Note that this function assumes that points1 and points2 are feature points from cameras with the
  9620. same camera intrinsic matrix. If this assumption does not hold for your use case, use
  9621. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  9622. to normalized image coordinates, which are valid for the identity camera intrinsic matrix. When
  9623. passing these coordinates, pass the identity matrix for this parameter.</dd>
  9624. <dd><code>method</code> - Method for computing an essential matrix.
  9625. <ul>
  9626. <li>
  9627. REF: RANSAC for the RANSAC algorithm.
  9628. </li>
  9629. <li>
  9630. REF: LMEDS for the LMedS algorithm.
  9631. </li>
  9632. </ul></dd>
  9633. <dd><code>prob</code> - Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
  9634. confidence (probability) that the estimated matrix is correct.
The threshold on the distance to the epipolar line and the output inlier mask are not exposed by
this overload and keep their default values.
This function estimates the essential matrix based on the five-point algorithm solver in CITE: Nister03 .
CITE: SteweniusCFS is also related. The epipolar geometry is described by the following equation:
  9641. \([p_2; 1]^T K^{-T} E K^{-1} [p_1; 1] = 0\)
  9642. where \(E\) is an essential matrix, \(p_1\) and \(p_2\) are corresponding points in the first and the
  9643. second images, respectively. The result of this function may be passed further to
  9644. #decomposeEssentialMat or #recoverPose to recover the relative pose between cameras.</dd>
  9645. <dt><span class="returnLabel">Returns:</span></dt>
  9646. <dd>automatically generated</dd>
  9647. </dl>
  9648. </li>
  9649. </ul>
  9650. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-double-">
  9651. <!-- -->
  9652. </a>
  9653. <ul class="blockList">
  9654. <li class="blockList">
  9655. <h4>findEssentialMat</h4>
  9656. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9657. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9658. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  9659. int&nbsp;method,
  9660. double&nbsp;prob,
  9661. double&nbsp;threshold)</pre>
  9662. <div class="block">Calculates an essential matrix from the corresponding points in two images.</div>
  9663. <dl>
  9664. <dt><span class="paramLabel">Parameters:</span></dt>
  9665. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9666. be floating-point (single or double precision).</dd>
  9667. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  9668. <dd><code>cameraMatrix</code> - Camera intrinsic matrix \(\cameramatrix{A}\) .
  9669. Note that this function assumes that points1 and points2 are feature points from cameras with the
  9670. same camera intrinsic matrix. If this assumption does not hold for your use case, use
  9671. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  9672. to normalized image coordinates, which are valid for the identity camera intrinsic matrix. When
  9673. passing these coordinates, pass the identity matrix for this parameter.</dd>
  9674. <dd><code>method</code> - Method for computing an essential matrix.
  9675. <ul>
  9676. <li>
  9677. REF: RANSAC for the RANSAC algorithm.
  9678. </li>
  9679. <li>
  9680. REF: LMEDS for the LMedS algorithm.
  9681. </li>
  9682. </ul></dd>
  9683. <dd><code>prob</code> - Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
  9684. confidence (probability) that the estimated matrix is correct.</dd>
  9685. <dd><code>threshold</code> - Parameter used for RANSAC. It is the maximum distance from a point to an epipolar
  9686. line in pixels, beyond which the point is considered an outlier and is not used for computing the
  9687. final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
  9688. point localization, image resolution, and the image noise.
The output inlier mask is not exposed by this overload.
This function estimates the essential matrix based on the five-point algorithm solver in CITE: Nister03 .
CITE: SteweniusCFS is also related. The epipolar geometry is described by the following equation:
  9692. \([p_2; 1]^T K^{-T} E K^{-1} [p_1; 1] = 0\)
  9693. where \(E\) is an essential matrix, \(p_1\) and \(p_2\) are corresponding points in the first and the
  9694. second images, respectively. The result of this function may be passed further to
  9695. #decomposeEssentialMat or #recoverPose to recover the relative pose between cameras.</dd>
  9696. <dt><span class="returnLabel">Returns:</span></dt>
  9697. <dd>automatically generated</dd>
  9698. </dl>
  9699. </li>
  9700. </ul>
  9701. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-double-int-">
  9702. <!-- -->
  9703. </a>
  9704. <ul class="blockList">
  9705. <li class="blockList">
  9706. <h4>findEssentialMat</h4>
  9707. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9708. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9709. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  9710. int&nbsp;method,
  9711. double&nbsp;prob,
  9712. double&nbsp;threshold,
  9713. int&nbsp;maxIters)</pre>
  9714. <div class="block">Calculates an essential matrix from the corresponding points in two images.</div>
  9715. <dl>
  9716. <dt><span class="paramLabel">Parameters:</span></dt>
  9717. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9718. be floating-point (single or double precision).</dd>
  9719. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  9720. <dd><code>cameraMatrix</code> - Camera intrinsic matrix \(\cameramatrix{A}\) .
  9721. Note that this function assumes that points1 and points2 are feature points from cameras with the
  9722. same camera intrinsic matrix. If this assumption does not hold for your use case, use
  9723. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  9724. to normalized image coordinates, which are valid for the identity camera intrinsic matrix. When
  9725. passing these coordinates, pass the identity matrix for this parameter.</dd>
  9726. <dd><code>method</code> - Method for computing an essential matrix.
  9727. <ul>
  9728. <li>
  9729. REF: RANSAC for the RANSAC algorithm.
  9730. </li>
  9731. <li>
  9732. REF: LMEDS for the LMedS algorithm.
  9733. </li>
  9734. </ul></dd>
  9735. <dd><code>prob</code> - Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
  9736. confidence (probability) that the estimated matrix is correct.</dd>
  9737. <dd><code>threshold</code> - Parameter used for RANSAC. It is the maximum distance from a point to an epipolar
  9738. line in pixels, beyond which the point is considered an outlier and is not used for computing the
  9739. final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
  9740. point localization, image resolution, and the image noise.
The output inlier mask is not exposed by this overload.</dd>
  9742. <dd><code>maxIters</code> - The maximum number of robust method iterations.
This function estimates the essential matrix based on the five-point algorithm solver in CITE: Nister03 .
CITE: SteweniusCFS is also related. The epipolar geometry is described by the following equation:
  9745. \([p_2; 1]^T K^{-T} E K^{-1} [p_1; 1] = 0\)
  9746. where \(E\) is an essential matrix, \(p_1\) and \(p_2\) are corresponding points in the first and the
  9747. second images, respectively. The result of this function may be passed further to
  9748. #decomposeEssentialMat or #recoverPose to recover the relative pose between cameras.</dd>
  9749. <dt><span class="returnLabel">Returns:</span></dt>
  9750. <dd>automatically generated</dd>
  9751. </dl>
  9752. </li>
  9753. </ul>
  9754. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-double-int-org.opencv.core.Mat-">
  9755. <!-- -->
  9756. </a>
  9757. <ul class="blockList">
  9758. <li class="blockList">
  9759. <h4>findEssentialMat</h4>
  9760. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9761. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9762. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  9763. int&nbsp;method,
  9764. double&nbsp;prob,
  9765. double&nbsp;threshold,
  9766. int&nbsp;maxIters,
  9767. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</pre>
  9768. <div class="block">Calculates an essential matrix from the corresponding points in two images.</div>
  9769. <dl>
  9770. <dt><span class="paramLabel">Parameters:</span></dt>
  9771. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9772. be floating-point (single or double precision).</dd>
  9773. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  9774. <dd><code>cameraMatrix</code> - Camera intrinsic matrix \(\cameramatrix{A}\) .
  9775. Note that this function assumes that points1 and points2 are feature points from cameras with the
  9776. same camera intrinsic matrix. If this assumption does not hold for your use case, use
  9777. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  9778. to normalized image coordinates, which are valid for the identity camera intrinsic matrix. When
  9779. passing these coordinates, pass the identity matrix for this parameter.</dd>
  9780. <dd><code>method</code> - Method for computing an essential matrix.
  9781. <ul>
  9782. <li>
  9783. REF: RANSAC for the RANSAC algorithm.
  9784. </li>
  9785. <li>
  9786. REF: LMEDS for the LMedS algorithm.
  9787. </li>
  9788. </ul></dd>
  9789. <dd><code>prob</code> - Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
  9790. confidence (probability) that the estimated matrix is correct.</dd>
  9791. <dd><code>threshold</code> - Parameter used for RANSAC. It is the maximum distance from a point to an epipolar
  9792. line in pixels, beyond which the point is considered an outlier and is not used for computing the
  9793. final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
  9794. point localization, image resolution, and the image noise.</dd>
  9795. <dd><code>mask</code> - Output array of N elements, every element of which is set to 0 for outliers and to 1
  9796. for the other points. The array is computed only in the RANSAC and LMedS methods.</dd>
  9797. <dd><code>maxIters</code> - The maximum number of robust method iterations.
This function estimates the essential matrix based on the five-point algorithm solver in CITE: Nister03 .
CITE: SteweniusCFS is also related. The epipolar geometry is described by the following equation:
  9800. \([p_2; 1]^T K^{-T} E K^{-1} [p_1; 1] = 0\)
  9801. where \(E\) is an essential matrix, \(p_1\) and \(p_2\) are corresponding points in the first and the
  9802. second images, respectively. The result of this function may be passed further to
  9803. #decomposeEssentialMat or #recoverPose to recover the relative pose between cameras.</dd>
  9804. <dt><span class="returnLabel">Returns:</span></dt>
  9805. <dd>automatically generated</dd>
  9806. </dl>
  9807. </li>
  9808. </ul>
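<div class="block">The cameraMatrix note above recommends normalising the points first when the two views do
not share intrinsics. A sketch of that workflow, assuming undistortPoints is exposed here as
Calib3d.undistortPoints with a (src, dst, cameraMatrix, distCoeffs) overload; the intrinsics,
distortion coefficients, and matches are hypothetical placeholders.</div>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDouble;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;

public class NormalizedEssential {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Hypothetical, different intrinsics and distortion for the two views.
        Mat K1 = Mat.eye(3, 3, CvType.CV_64F);
        K1.put(0, 0, 800.0, 0.0, 320.0, 0.0, 800.0, 240.0, 0.0, 0.0, 1.0);
        Mat K2 = Mat.eye(3, 3, CvType.CV_64F);
        K2.put(0, 0, 650.0, 0.0, 310.0, 0.0, 650.0, 245.0, 0.0, 0.0, 1.0);
        MatOfDouble d1 = new MatOfDouble(0.05, -0.01, 0.0, 0.0);
        MatOfDouble d2 = new MatOfDouble(-0.02, 0.00, 0.0, 0.0);

        MatOfPoint2f pts1 = new MatOfPoint2f(
                new Point(100, 100), new Point(210, 120), new Point(150, 210),
                new Point(300, 250), new Point(60, 300), new Point(250, 80));
        MatOfPoint2f pts2 = new MatOfPoint2f(
                new Point(104, 98), new Point(215, 118), new Point(153, 207),
                new Point(305, 246), new Point(66, 297), new Point(256, 79));

        // Undistort to normalized image coordinates (omitting R and P, i.e. P = noArray()).
        MatOfPoint2f n1 = new MatOfPoint2f();
        MatOfPoint2f n2 = new MatOfPoint2f();
        Calib3d.undistortPoints(pts1, n1, K1, d1);
        Calib3d.undistortPoints(pts2, n2, K2, d2);

        // Normalized coordinates are valid for the identity camera intrinsic matrix,
        // so pass the identity matrix and a threshold expressed in normalized units.
        Mat identity = Mat.eye(3, 3, CvType.CV_64F);
        Mat E = Calib3d.findEssentialMat(n1, n2, identity, Calib3d.RANSAC, 0.999, 1e-3);
        System.out.println("E =\n" + E.dump());
    }
}
</pre>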
  9809. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  9810. <!-- -->
  9811. </a>
  9812. <ul class="blockList">
  9813. <li class="blockList">
  9814. <h4>findEssentialMat</h4>
  9815. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9816. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9817. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  9818. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  9819. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  9820. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2)</pre>
  9821. <div class="block">Calculates an essential matrix from the corresponding points in two images from potentially two different cameras.</div>
  9822. <dl>
  9823. <dt><span class="paramLabel">Parameters:</span></dt>
  9824. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9825. be floating-point (single or double precision).</dd>
  9826. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  9827. <dd><code>cameraMatrix1</code> - Camera matrix \(K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .
  9828. Note that this function assumes that points1 and points2 are feature points from cameras with the
  9829. same camera matrix. If this assumption does not hold for your use case, use
  9830. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  9831. to normalized image coordinates, which are valid for the identity camera matrix. When
  9832. passing these coordinates, pass the identity matrix for this parameter.</dd>
  9833. <dd><code>cameraMatrix2</code> - Camera matrix \(K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .
  9834. Note that this function assumes that points1 and points2 are feature points from cameras with the
  9835. same camera matrix. If this assumption does not hold for your use case, use
  9836. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  9837. to normalized image coordinates, which are valid for the identity camera matrix. When
  9838. passing these coordinates, pass the identity matrix for this parameter.</dd>
  9839. <dd><code>distCoeffs1</code> - Input vector of distortion coefficients
  9840. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  9841. of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.</dd>
  9842. <dd><code>distCoeffs2</code> - Input vector of distortion coefficients
  9843. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  9844. of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.
The robust estimation method (REF: RANSAC for the RANSAC algorithm or REF: LMEDS for the LMedS
algorithm), the confidence (probability) that the estimated matrix is correct, the threshold on the
distance to the epipolar line, and the output inlier mask are not exposed by this overload and keep
their default values.
This function estimates the essential matrix based on the five-point algorithm solver in CITE: Nister03 .
CITE: SteweniusCFS is also related. The epipolar geometry is described by the following equation:
  9860. \([p_2; 1]^T K^{-T} E K^{-1} [p_1; 1] = 0\)
  9861. where \(E\) is an essential matrix, \(p_1\) and \(p_2\) are corresponding points in the first and the
  9862. second images, respectively. The result of this function may be passed further to
  9863. #decomposeEssentialMat or #recoverPose to recover the relative pose between cameras.</dd>
  9864. <dt><span class="returnLabel">Returns:</span></dt>
  9865. <dd>automatically generated</dd>
  9866. </dl>
  9867. </li>
  9868. </ul>
  9869. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">
  9870. <!-- -->
  9871. </a>
  9872. <ul class="blockList">
  9873. <li class="blockList">
  9874. <h4>findEssentialMat</h4>
  9875. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9876. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9877. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  9878. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  9879. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  9880. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  9881. int&nbsp;method)</pre>
  9882. <div class="block">Calculates an essential matrix from the corresponding points in two images from potentially two different cameras.</div>
  9883. <dl>
  9884. <dt><span class="paramLabel">Parameters:</span></dt>
  9885. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9886. be floating-point (single or double precision).</dd>
  9887. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  9888. <dd><code>cameraMatrix1</code> - Camera matrix \(K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .
  9889. Note that this function assumes that points1 and points2 are feature points from cameras with the
  9890. same camera matrix. If this assumption does not hold for your use case, use
  9891. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  9892. to normalized image coordinates, which are valid for the identity camera matrix. When
  9893. passing these coordinates, pass the identity matrix for this parameter.</dd>
  9894. <dd><code>cameraMatrix2</code> - Camera matrix \(K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .
  9895. Note that this function assumes that points1 and points2 are feature points from cameras with the
  9896. same camera matrix. If this assumption does not hold for your use case, use
  9897. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  9898. to normalized image coordinates, which are valid for the identity camera matrix. When
  9899. passing these coordinates, pass the identity matrix for this parameter.</dd>
  9900. <dd><code>distCoeffs1</code> - Input vector of distortion coefficients
  9901. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  9902. of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.</dd>
  9903. <dd><code>distCoeffs2</code> - Input vector of distortion coefficients
  9904. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  9905. of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.</dd>
  9906. <dd><code>method</code> - Method for computing an essential matrix.
  9907. <ul>
  9908. <li>
  9909. REF: RANSAC for the RANSAC algorithm.
  9910. </li>
  9911. <li>
  9912. REF: LMEDS for the LMedS algorithm.
  9913. </li>
  9914. </ul>
The confidence (probability) that the estimated matrix is correct, the threshold on the distance to
the epipolar line, and the output inlier mask are not exposed by this overload and keep their
default values.
This function estimates the essential matrix based on the five-point algorithm solver in CITE: Nister03 .
CITE: SteweniusCFS is also related. The epipolar geometry is described by the following equation:
  9922. \([p_2; 1]^T K^{-T} E K^{-1} [p_1; 1] = 0\)
  9923. where \(E\) is an essential matrix, \(p_1\) and \(p_2\) are corresponding points in the first and the
  9924. second images, respectively. The result of this function may be passed further to
  9925. #decomposeEssentialMat or #recoverPose to recover the relative pose between cameras.</dd>
  9926. <dt><span class="returnLabel">Returns:</span></dt>
  9927. <dd>automatically generated</dd>
  9928. </dl>
  9929. </li>
  9930. </ul>
  9931. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-">
  9932. <!-- -->
  9933. </a>
  9934. <ul class="blockList">
  9935. <li class="blockList">
  9936. <h4>findEssentialMat</h4>
  9937. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  9938. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  9939. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  9940. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  9941. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  9942. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  9943. int&nbsp;method,
  9944. double&nbsp;prob)</pre>
  9945. <div class="block">Calculates an essential matrix from the corresponding points in two images from potentially two different cameras.</div>
  9946. <dl>
  9947. <dt><span class="paramLabel">Parameters:</span></dt>
  9948. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  9949. be floating-point (single or double precision).</dd>
  9950. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  9951. <dd><code>cameraMatrix1</code> - Camera matrix \(K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .
  9952. Note that this function assumes that points1 and points2 are feature points from cameras with the
  9953. same camera matrix. If this assumption does not hold for your use case, use
  9954. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  9955. to normalized image coordinates, which are valid for the identity camera matrix. When
  9956. passing these coordinates, pass the identity matrix for this parameter.</dd>
  9957. <dd><code>cameraMatrix2</code> - Camera matrix \(K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .
  9958. Note that this function assumes that points1 and points2 are feature points from cameras with the
  9959. same camera matrix. If this assumption does not hold for your use case, use
  9960. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  9961. to normalized image coordinates, which are valid for the identity camera matrix. When
  9962. passing these coordinates, pass the identity matrix for this parameter.</dd>
  9963. <dd><code>distCoeffs1</code> - Input vector of distortion coefficients
  9964. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  9965. of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.</dd>
  9966. <dd><code>distCoeffs2</code> - Input vector of distortion coefficients
  9967. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  9968. of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.</dd>
  9969. <dd><code>method</code> - Method for computing an essential matrix.
  9970. <ul>
  9971. <li>
  9972. REF: RANSAC for the RANSAC algorithm.
  9973. </li>
  9974. <li>
  9975. REF: LMEDS for the LMedS algorithm.
  9976. </li>
  9977. </ul></dd>
  9978. <dd><code>prob</code> - Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
  9979. confidence (probability) that the estimated matrix is correct.
The threshold on the distance to the epipolar line and the output inlier mask are not exposed by
this overload and keep their default values.
This function estimates the essential matrix based on the five-point algorithm solver in CITE: Nister03 .
CITE: SteweniusCFS is also related. The epipolar geometry is described by the following equation:
  9986. \([p_2; 1]^T K^{-T} E K^{-1} [p_1; 1] = 0\)
  9987. where \(E\) is an essential matrix, \(p_1\) and \(p_2\) are corresponding points in the first and the
  9988. second images, respectively. The result of this function may be passed further to
  9989. #decomposeEssentialMat or #recoverPose to recover the relative pose between cameras.</dd>
  9990. <dt><span class="returnLabel">Returns:</span></dt>
  9991. <dd>automatically generated</dd>
  9992. </dl>
  9993. </li>
  9994. </ul>
  9995. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-double-">
  9996. <!-- -->
  9997. </a>
  9998. <ul class="blockList">
  9999. <li class="blockList">
  10000. <h4>findEssentialMat</h4>
  10001. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  10002. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  10003. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  10004. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  10005. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  10006. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  10007. int&nbsp;method,
  10008. double&nbsp;prob,
  10009. double&nbsp;threshold)</pre>
  10010. <div class="block">Calculates an essential matrix from the corresponding points in two images from potentially two different cameras.</div>
  10011. <dl>
  10012. <dt><span class="paramLabel">Parameters:</span></dt>
  10013. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  10014. be floating-point (single or double precision).</dd>
  10015. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  10016. <dd><code>cameraMatrix1</code> - Camera matrix \(K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .
  10017. Note that this function assumes that points1 and points2 are feature points from cameras with the
  10018. same camera matrix. If this assumption does not hold for your use case, use
  10019. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  10020. to normalized image coordinates, which are valid for the identity camera matrix. When
  10021. passing these coordinates, pass the identity matrix for this parameter.</dd>
  10022. <dd><code>cameraMatrix2</code> - Camera matrix \(K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .
  10023. Note that this function assumes that points1 and points2 are feature points from cameras with the
  10024. same camera matrix. If this assumption does not hold for your use case, use
  10025. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  10026. to normalized image coordinates, which are valid for the identity camera matrix. When
  10027. passing these coordinates, pass the identity matrix for this parameter.</dd>
  10028. <dd><code>distCoeffs1</code> - Input vector of distortion coefficients
  10029. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  10030. of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.</dd>
  10031. <dd><code>distCoeffs2</code> - Input vector of distortion coefficients
  10032. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  10033. of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.</dd>
  10034. <dd><code>method</code> - Method for computing an essential matrix.
  10035. <ul>
  10036. <li>
  10037. REF: RANSAC for the RANSAC algorithm.
  10038. </li>
  10039. <li>
  10040. REF: LMEDS for the LMedS algorithm.
  10041. </li>
  10042. </ul></dd>
  10043. <dd><code>prob</code> - Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
  10044. confidence (probability) that the estimated matrix is correct.</dd>
  10045. <dd><code>threshold</code> - Parameter used for RANSAC. It is the maximum distance from a point to an epipolar
  10046. line in pixels, beyond which the point is considered an outlier and is not used for computing the
  10047. final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
  10048. point localization, image resolution, and the image noise.
The output inlier mask is not exposed by this overload.
This function estimates the essential matrix based on the five-point algorithm solver in CITE: Nister03 .
CITE: SteweniusCFS is also related. The epipolar geometry is described by the following equation:
  10052. \([p_2; 1]^T K^{-T} E K^{-1} [p_1; 1] = 0\)
  10053. where \(E\) is an essential matrix, \(p_1\) and \(p_2\) are corresponding points in the first and the
  10054. second images, respectively. The result of this function may be passed further to
  10055. #decomposeEssentialMat or #recoverPose to recover the relative pose between cameras.</dd>
  10056. <dt><span class="returnLabel">Returns:</span></dt>
  10057. <dd>automatically generated</dd>
  10058. </dl>
  10059. </li>
  10060. </ul>
  10061. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-double-org.opencv.core.Mat-">
  10062. <!-- -->
  10063. </a>
  10064. <ul class="blockList">
  10065. <li class="blockList">
  10066. <h4>findEssentialMat</h4>
  10067. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  10068. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  10069. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  10070. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  10071. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  10072. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  10073. int&nbsp;method,
  10074. double&nbsp;prob,
  10075. double&nbsp;threshold,
  10076. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</pre>
  10077. <div class="block">Calculates an essential matrix from the corresponding points in two images from potentially two different cameras.</div>
  10078. <dl>
  10079. <dt><span class="paramLabel">Parameters:</span></dt>
  10080. <dd><code>points1</code> - Array of N (N &gt;= 5) 2D points from the first image. The point coordinates should
  10081. be floating-point (single or double precision).</dd>
  10082. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  10083. <dd><code>cameraMatrix1</code> - Camera matrix \(K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .
  10084. Note that this function assumes that points1 and points2 are feature points from cameras with the
  10085. same camera matrix. If this assumption does not hold for your use case, use
  10086. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  10087. to normalized image coordinates, which are valid for the identity camera matrix. When
  10088. passing these coordinates, pass the identity matrix for this parameter.</dd>
  10089. <dd><code>cameraMatrix2</code> - Camera matrix \(K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .
  10090. Note that this function assumes that points1 and points2 are feature points from cameras with the
  10091. same camera matrix. If this assumption does not hold for your use case, use
  10092. #undistortPoints with <code>P = cv::NoArray()</code> for both cameras to transform image points
  10093. to normalized image coordinates, which are valid for the identity camera matrix. When
  10094. passing these coordinates, pass the identity matrix for this parameter.</dd>
  10095. <dd><code>distCoeffs1</code> - Input vector of distortion coefficients
  10096. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  10097. of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.</dd>
  10098. <dd><code>distCoeffs2</code> - Input vector of distortion coefficients
  10099. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  10100. of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.</dd>
  10101. <dd><code>method</code> - Method for computing an essential matrix.
  10102. <ul>
  10103. <li>
  10104. REF: RANSAC for the RANSAC algorithm.
  10105. </li>
  10106. <li>
  10107. REF: LMEDS for the LMedS algorithm.
  10108. </li>
  10109. </ul></dd>
  10110. <dd><code>prob</code> - Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
  10111. confidence (probability) that the estimated matrix is correct.</dd>
  10112. <dd><code>threshold</code> - Parameter used for RANSAC. It is the maximum distance from a point to an epipolar
  10113. line in pixels, beyond which the point is considered an outlier and is not used for computing the
10114. final essential matrix. It can be set to something like 1-3, depending on the accuracy of the
  10115. point localization, image resolution, and the image noise.</dd>
  10116. <dd><code>mask</code> - Output array of N elements, every element of which is set to 0 for outliers and to 1
  10117. for the other points. The array is computed only in the RANSAC and LMedS methods.
10118. This function estimates the essential matrix based on the five-point algorithm solver in CITE: Nister03 .
10119. CITE: SteweniusCFS is also a related solution. The epipolar geometry is described by the following equation:
  10120. \([p_2; 1]^T K^{-T} E K^{-1} [p_1; 1] = 0\)
  10121. where \(E\) is an essential matrix, \(p_1\) and \(p_2\) are corresponding points in the first and the
  10122. second images, respectively. The result of this function may be passed further to
  10123. #decomposeEssentialMat or #recoverPose to recover the relative pose between cameras.</dd>
  10124. <dt><span class="returnLabel">Returns:</span></dt>
  10125. <dd>automatically generated</dd>
  10126. </dl>
  10127. </li>
  10128. </ul>
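<div class="block">A minimal Java sketch of calling the overload above (illustrative only, not part of the generated documentation): the class name, point coordinates and intrinsics are placeholder assumptions, and real correspondences would normally come from a feature matcher.</div>
<pre><code>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;

public class EssentialMatSketch {
    public static void main(String[] args) {
        // Assumes the OpenCV native library has already been loaded,
        // e.g. System.loadLibrary(org.opencv.core.Core.NATIVE_LIBRARY_NAME).

        // Matched feature points from the two images (placeholder values, N &gt;= 5).
        MatOfPoint2f points1 = new MatOfPoint2f(
                new Point(100, 120), new Point(200, 130), new Point(310, 240),
                new Point(420, 350), new Point(150, 400), new Point(260, 410));
        MatOfPoint2f points2 = new MatOfPoint2f(
                new Point(102, 118), new Point(205, 128), new Point(318, 236),
                new Point(430, 344), new Point(158, 396), new Point(270, 404));

        // Intrinsics of the two cameras (placeholder values).
        Mat K1 = Mat.eye(3, 3, CvType.CV_64F);
        K1.put(0, 0, 800); K1.put(1, 1, 800); K1.put(0, 2, 320); K1.put(1, 2, 240);
        Mat K2 = K1.clone();
        Mat dist1 = Mat.zeros(1, 5, CvType.CV_64F); // assume negligible lens distortion
        Mat dist2 = Mat.zeros(1, 5, CvType.CV_64F);

        Mat mask = new Mat(); // filled by RANSAC: 1 for inliers, 0 for outliers
        Mat E = Calib3d.findEssentialMat(points1, points2, K1, dist1, K2, dist2,
                Calib3d.RANSAC, 0.999, 1.0, mask);
        System.out.println("E = " + E.dump());
        // E may then be passed to decomposeEssentialMat or recoverPose.
    }
}
</code></pre>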
  10129. <a name="findEssentialMat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.calib3d.UsacParams-">
  10130. <!-- -->
  10131. </a>
  10132. <ul class="blockList">
  10133. <li class="blockList">
  10134. <h4>findEssentialMat</h4>
  10135. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findEssentialMat(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  10136. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  10137. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  10138. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  10139. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dist_coeff1,
  10140. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dist_coeff2,
  10141. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask,
  10142. <a href="../../../org/opencv/calib3d/UsacParams.html" title="class in org.opencv.calib3d">UsacParams</a>&nbsp;params)</pre>
  10143. </li>
  10144. </ul>
  10145. <a name="findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-">
  10146. <!-- -->
  10147. </a>
  10148. <ul class="blockList">
  10149. <li class="blockList">
  10150. <h4>findFundamentalMat</h4>
  10151. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findFundamentalMat(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  10152. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2)</pre>
  10153. </li>
  10154. </ul>
  10155. <a name="findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-">
  10156. <!-- -->
  10157. </a>
  10158. <ul class="blockList">
  10159. <li class="blockList">
  10160. <h4>findFundamentalMat</h4>
  10161. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findFundamentalMat(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  10162. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2,
  10163. int&nbsp;method)</pre>
  10164. </li>
  10165. </ul>
  10166. <a name="findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-">
  10167. <!-- -->
  10168. </a>
  10169. <ul class="blockList">
  10170. <li class="blockList">
  10171. <h4>findFundamentalMat</h4>
  10172. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findFundamentalMat(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  10173. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2,
  10174. int&nbsp;method,
  10175. double&nbsp;ransacReprojThreshold)</pre>
  10176. </li>
  10177. </ul>
  10178. <a name="findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-double-">
  10179. <!-- -->
  10180. </a>
  10181. <ul class="blockList">
  10182. <li class="blockList">
  10183. <h4>findFundamentalMat</h4>
  10184. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findFundamentalMat(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  10185. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2,
  10186. int&nbsp;method,
  10187. double&nbsp;ransacReprojThreshold,
  10188. double&nbsp;confidence)</pre>
  10189. </li>
  10190. </ul>
  10191. <a name="findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-double-int-">
  10192. <!-- -->
  10193. </a>
  10194. <ul class="blockList">
  10195. <li class="blockList">
  10196. <h4>findFundamentalMat</h4>
  10197. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findFundamentalMat(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  10198. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2,
  10199. int&nbsp;method,
  10200. double&nbsp;ransacReprojThreshold,
  10201. double&nbsp;confidence,
  10202. int&nbsp;maxIters)</pre>
  10203. <div class="block">Calculates a fundamental matrix from the corresponding points in two images.</div>
  10204. <dl>
  10205. <dt><span class="paramLabel">Parameters:</span></dt>
  10206. <dd><code>points1</code> - Array of N points from the first image. The point coordinates should be
  10207. floating-point (single or double precision).</dd>
  10208. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  10209. <dd><code>method</code> - Method for computing a fundamental matrix.
  10210. <ul>
  10211. <li>
  10212. REF: FM_7POINT for a 7-point algorithm. \(N = 7\)
  10213. </li>
  10214. <li>
  10215. REF: FM_8POINT for an 8-point algorithm. \(N \ge 8\)
  10216. </li>
  10217. <li>
  10218. REF: FM_RANSAC for the RANSAC algorithm. \(N \ge 8\)
  10219. </li>
  10220. <li>
  10221. REF: FM_LMEDS for the LMedS algorithm. \(N \ge 8\)
  10222. </li>
  10223. </ul></dd>
  10224. <dd><code>ransacReprojThreshold</code> - Parameter used only for RANSAC. It is the maximum distance from a point to an epipolar
  10225. line in pixels, beyond which the point is considered an outlier and is not used for computing the
  10226. final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
  10227. point localization, image resolution, and the image noise.</dd>
  10228. <dd><code>confidence</code> - Parameter used for the RANSAC and LMedS methods only. It specifies a desirable level
  10229. of confidence (probability) that the estimated matrix is correct.</dd>
  10230. <dd><code>maxIters</code> - The maximum number of robust method iterations.
  10231. The epipolar geometry is described by the following equation:
  10232. \([p_2; 1]^T F [p_1; 1] = 0\)
  10233. where \(F\) is a fundamental matrix, \(p_1\) and \(p_2\) are corresponding points in the first and the
  10234. second images, respectively.
  10235. The function calculates the fundamental matrix using one of four methods listed above and returns
10236. the found fundamental matrix. Normally just one matrix is found. In the case of the 7-point
10237. algorithm, however, the function may return up to 3 solutions (a \(9 \times 3\) matrix that stores all 3
10238. matrices sequentially).
  10239. The calculated fundamental matrix may be passed further to #computeCorrespondEpilines that finds the
  10240. epipolar lines corresponding to the specified points. It can also be passed to
  10241. #stereoRectifyUncalibrated to compute the rectification transformation. :
  10242. <code>
  10243. // Example. Estimation of fundamental matrix using the RANSAC algorithm
  10244. int point_count = 100;
  10245. vector&lt;Point2f&gt; points1(point_count);
  10246. vector&lt;Point2f&gt; points2(point_count);
  10247. // initialize the points here ...
  10248. for( int i = 0; i &lt; point_count; i++ )
  10249. {
  10250. points1[i] = ...;
  10251. points2[i] = ...;
  10252. }
  10253. Mat fundamental_matrix =
  10254. findFundamentalMat(points1, points2, FM_RANSAC, 3, 0.99);
  10255. </code></dd>
  10256. <dt><span class="returnLabel">Returns:</span></dt>
  10257. <dd>automatically generated</dd>
  10258. </dl>
  10259. </li>
  10260. </ul>
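<div class="block">For reference, a Java-bindings counterpart of the C++ snippet above (a sketch only: real matched points are assumed, and the synthetic coordinates here merely make the fragment self-contained and runnable):</div>
<pre><code>
import java.util.ArrayList;
import java.util.List;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;

public class FundamentalMatSketch {
    public static void main(String[] args) {
        // Assumes the OpenCV native library has already been loaded.
        int pointCount = 100;
        List&lt;Point&gt; pts1 = new ArrayList&lt;&gt;();
        List&lt;Point&gt; pts2 = new ArrayList&lt;&gt;();
        for (int i = 0; i &lt; pointCount; i++) {
            // In a real application these would be matched feature locations.
            pts1.add(new Point(50 + 3.0 * i, 40 + 7.0 * (i % 10)));
            pts2.add(new Point(53 + 3.0 * i, 38 + 7.0 * (i % 10)));
        }
        MatOfPoint2f points1 = new MatOfPoint2f();
        MatOfPoint2f points2 = new MatOfPoint2f();
        points1.fromList(pts1);
        points2.fromList(pts2);

        // RANSAC with a 3-pixel threshold and 0.99 confidence, as in the C++ example.
        Mat fundamentalMatrix = Calib3d.findFundamentalMat(
                points1, points2, Calib3d.FM_RANSAC, 3, 0.99);
        System.out.println("F = " + fundamentalMatrix.dump());
    }
}
</code></pre>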
  10261. <a name="findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-double-int-org.opencv.core.Mat-">
  10262. <!-- -->
  10263. </a>
  10264. <ul class="blockList">
  10265. <li class="blockList">
  10266. <h4>findFundamentalMat</h4>
  10267. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findFundamentalMat(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  10268. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2,
  10269. int&nbsp;method,
  10270. double&nbsp;ransacReprojThreshold,
  10271. double&nbsp;confidence,
  10272. int&nbsp;maxIters,
  10273. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</pre>
  10274. <div class="block">Calculates a fundamental matrix from the corresponding points in two images.</div>
  10275. <dl>
  10276. <dt><span class="paramLabel">Parameters:</span></dt>
  10277. <dd><code>points1</code> - Array of N points from the first image. The point coordinates should be
  10278. floating-point (single or double precision).</dd>
  10279. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  10280. <dd><code>method</code> - Method for computing a fundamental matrix.
  10281. <ul>
  10282. <li>
  10283. REF: FM_7POINT for a 7-point algorithm. \(N = 7\)
  10284. </li>
  10285. <li>
  10286. REF: FM_8POINT for an 8-point algorithm. \(N \ge 8\)
  10287. </li>
  10288. <li>
  10289. REF: FM_RANSAC for the RANSAC algorithm. \(N \ge 8\)
  10290. </li>
  10291. <li>
  10292. REF: FM_LMEDS for the LMedS algorithm. \(N \ge 8\)
  10293. </li>
  10294. </ul></dd>
  10295. <dd><code>ransacReprojThreshold</code> - Parameter used only for RANSAC. It is the maximum distance from a point to an epipolar
  10296. line in pixels, beyond which the point is considered an outlier and is not used for computing the
  10297. final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
  10298. point localization, image resolution, and the image noise.</dd>
  10299. <dd><code>confidence</code> - Parameter used for the RANSAC and LMedS methods only. It specifies a desirable level
  10300. of confidence (probability) that the estimated matrix is correct.</dd>
10301. <dd><code>mask</code> - Optional output mask set by the robust method: each element is 1 for an inlier and 0 for an outlier.</dd>
  10302. <dd><code>maxIters</code> - The maximum number of robust method iterations.
  10303. The epipolar geometry is described by the following equation:
  10304. \([p_2; 1]^T F [p_1; 1] = 0\)
  10305. where \(F\) is a fundamental matrix, \(p_1\) and \(p_2\) are corresponding points in the first and the
  10306. second images, respectively.
  10307. The function calculates the fundamental matrix using one of four methods listed above and returns
10308. the found fundamental matrix. Normally just one matrix is found. In the case of the 7-point
10309. algorithm, however, the function may return up to 3 solutions (a \(9 \times 3\) matrix that stores all 3
10310. matrices sequentially).
  10311. The calculated fundamental matrix may be passed further to #computeCorrespondEpilines that finds the
  10312. epipolar lines corresponding to the specified points. It can also be passed to
  10313. #stereoRectifyUncalibrated to compute the rectification transformation. :
  10314. <code>
  10315. // Example. Estimation of fundamental matrix using the RANSAC algorithm
  10316. int point_count = 100;
  10317. vector&lt;Point2f&gt; points1(point_count);
  10318. vector&lt;Point2f&gt; points2(point_count);
  10319. // initialize the points here ...
  10320. for( int i = 0; i &lt; point_count; i++ )
  10321. {
  10322. points1[i] = ...;
  10323. points2[i] = ...;
  10324. }
  10325. Mat fundamental_matrix =
  10326. findFundamentalMat(points1, points2, FM_RANSAC, 3, 0.99);
  10327. </code></dd>
  10328. <dt><span class="returnLabel">Returns:</span></dt>
  10329. <dd>automatically generated</dd>
  10330. </dl>
  10331. </li>
  10332. </ul>
  10333. <a name="findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-double-org.opencv.core.Mat-">
  10334. <!-- -->
  10335. </a>
  10336. <ul class="blockList">
  10337. <li class="blockList">
  10338. <h4>findFundamentalMat</h4>
  10339. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findFundamentalMat(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  10340. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2,
  10341. int&nbsp;method,
  10342. double&nbsp;ransacReprojThreshold,
  10343. double&nbsp;confidence,
  10344. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</pre>
  10345. </li>
  10346. </ul>
  10347. <a name="findFundamentalMat-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.calib3d.UsacParams-">
  10348. <!-- -->
  10349. </a>
  10350. <ul class="blockList">
  10351. <li class="blockList">
  10352. <h4>findFundamentalMat</h4>
  10353. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findFundamentalMat(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points1,
  10354. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;points2,
  10355. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask,
  10356. <a href="../../../org/opencv/calib3d/UsacParams.html" title="class in org.opencv.calib3d">UsacParams</a>&nbsp;params)</pre>
  10357. </li>
  10358. </ul>
  10359. <a name="findHomography-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-">
  10360. <!-- -->
  10361. </a>
  10362. <ul class="blockList">
  10363. <li class="blockList">
  10364. <h4>findHomography</h4>
  10365. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findHomography(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;srcPoints,
  10366. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dstPoints)</pre>
  10367. <div class="block">Finds a perspective transformation between two planes.</div>
  10368. <dl>
  10369. <dt><span class="paramLabel">Parameters:</span></dt>
  10370. <dd><code>srcPoints</code> - Coordinates of the points in the original plane, a matrix of the type CV_32FC2
  10371. or vector&lt;Point2f&gt; .</dd>
  10372. <dd><code>dstPoints</code> - Coordinates of the points in the target plane, a matrix of the type CV_32FC2 or
  10373. a vector&lt;Point2f&gt; .
  10374. <ul>
  10375. <li>
  10376. <b>0</b> - a regular method using all the points, i.e., the least squares method
  10377. </li>
  10378. <li>
  10379. REF: RANSAC - RANSAC-based robust method
  10380. </li>
  10381. <li>
  10382. REF: LMEDS - Least-Median robust method
  10383. </li>
  10384. <li>
  10385. REF: RHO - PROSAC-based robust method
  10386. </li>
  10387. </ul>
10388. These method values and the reprojection threshold below are parameters of the fuller overloads of this
10389. function; the threshold is used in the RANSAC and RHO methods only. That is, if
10390. \(\| \texttt{dstPoints} _i - \texttt{convertPointsHomogeneous} ( \texttt{H} \cdot \texttt{srcPoints} _i) \|_2 &gt; \texttt{ransacReprojThreshold}\)
10391. then the point \(i\) is considered an outlier. If srcPoints and dstPoints are measured in pixels,
10392. it usually makes sense to set that threshold somewhere in the range of 1 to 10. Any input mask values are ignored.
  10393. The function finds and returns the perspective transformation \(H\) between the source and the
  10394. destination planes:
  10395. \(s_i \vecthree{x'_i}{y'_i}{1} \sim H \vecthree{x_i}{y_i}{1}\)
  10396. so that the back-projection error
  10397. \(\sum _i \left ( x'_i- \frac{h_{11} x_i + h_{12} y_i + h_{13}}{h_{31} x_i + h_{32} y_i + h_{33}} \right )^2+ \left ( y'_i- \frac{h_{21} x_i + h_{22} y_i + h_{23}}{h_{31} x_i + h_{32} y_i + h_{33}} \right )^2\)
  10398. is minimized. If the parameter method is set to the default value 0, the function uses all the point
  10399. pairs to compute an initial homography estimate with a simple least-squares scheme.
  10400. However, if not all of the point pairs ( \(srcPoints_i\), \(dstPoints_i\) ) fit the rigid perspective
  10401. transformation (that is, there are some outliers), this initial estimate will be poor. In this case,
  10402. you can use one of the three robust methods. The methods RANSAC, LMeDS and RHO try many different
  10403. random subsets of the corresponding point pairs (of four pairs each, collinear pairs are discarded), estimate the homography matrix
  10404. using this subset and a simple least-squares algorithm, and then compute the quality/goodness of the
  10405. computed homography (which is the number of inliers for RANSAC or the least median re-projection error for
  10406. LMeDS). The best subset is then used to produce the initial estimate of the homography matrix and
  10407. the mask of inliers/outliers.
  10408. Regardless of the method, robust or not, the computed homography matrix is refined further (using
  10409. inliers only in case of a robust method) with the Levenberg-Marquardt method to reduce the
  10410. re-projection error even more.
  10411. The methods RANSAC and RHO can handle practically any ratio of outliers but need a threshold to
  10412. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  10413. correctly only when there are more than 50% of inliers. Finally, if there are no outliers and the
  10414. noise is rather small, use the default method (method=0).
  10415. The function is used to find initial intrinsic and extrinsic matrices. Homography matrix is
  10416. determined up to a scale. Thus, it is normalized so that \(h_{33}=1\). Note that whenever an \(H\) matrix
  10417. cannot be estimated, an empty one will be returned.
  10418. SEE:
  10419. getAffineTransform, estimateAffine2D, estimateAffinePartial2D, getPerspectiveTransform, warpPerspective,
  10420. perspectiveTransform</dd>
  10421. <dt><span class="returnLabel">Returns:</span></dt>
  10422. <dd>automatically generated</dd>
  10423. </dl>
  10424. </li>
  10425. </ul>
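<div class="block">A short Java sketch of the basic overload above (illustrative only; the corner coordinates are placeholder assumptions): four correspondences between a planar object and its image are enough for the default least-squares estimate.</div>
<pre><code>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;

public class HomographySketch {
    public static void main(String[] args) {
        // Assumes the OpenCV native library has already been loaded.

        // Corners of a planar object in the source image (placeholder values) ...
        MatOfPoint2f src = new MatOfPoint2f(
                new Point(56, 65), new Point(368, 52),
                new Point(28, 387), new Point(389, 390));
        // ... and where they should map to in the destination plane.
        MatOfPoint2f dst = new MatOfPoint2f(
                new Point(0, 0), new Point(300, 0),
                new Point(0, 300), new Point(300, 300));

        // No method argument: the plain least-squares estimate over all pairs.
        Mat H = Calib3d.findHomography(src, dst);
        System.out.println("H = " + H.dump());
        // H could then be passed, for example, to Imgproc.warpPerspective
        // to rectify the plane, as noted in the SEE list above.
    }
}
</code></pre>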
  10426. <a name="findHomography-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-">
  10427. <!-- -->
  10428. </a>
  10429. <ul class="blockList">
  10430. <li class="blockList">
  10431. <h4>findHomography</h4>
  10432. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findHomography(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;srcPoints,
  10433. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dstPoints,
  10434. int&nbsp;method)</pre>
  10435. <div class="block">Finds a perspective transformation between two planes.</div>
  10436. <dl>
  10437. <dt><span class="paramLabel">Parameters:</span></dt>
  10438. <dd><code>srcPoints</code> - Coordinates of the points in the original plane, a matrix of the type CV_32FC2
  10439. or vector&lt;Point2f&gt; .</dd>
  10440. <dd><code>dstPoints</code> - Coordinates of the points in the target plane, a matrix of the type CV_32FC2 or
  10441. a vector&lt;Point2f&gt; .</dd>
  10442. <dd><code>method</code> - Method used to compute a homography matrix. The following methods are possible:
  10443. <ul>
  10444. <li>
  10445. <b>0</b> - a regular method using all the points, i.e., the least squares method
  10446. </li>
  10447. <li>
  10448. REF: RANSAC - RANSAC-based robust method
  10449. </li>
  10450. <li>
  10451. REF: LMEDS - Least-Median robust method
  10452. </li>
  10453. <li>
  10454. REF: RHO - PROSAC-based robust method
  10455. </li>
  10456. </ul>
10457. The reprojection threshold referred to here is a parameter of the fuller overloads of this function
10458. (used in the RANSAC and RHO methods only). That is, if
10459. \(\| \texttt{dstPoints} _i - \texttt{convertPointsHomogeneous} ( \texttt{H} \cdot \texttt{srcPoints} _i) \|_2 &gt; \texttt{ransacReprojThreshold}\)
10460. then the point \(i\) is considered an outlier. If srcPoints and dstPoints are measured in pixels,
10461. it usually makes sense to set that threshold somewhere in the range of 1 to 10. Any input mask values are ignored.
  10462. The function finds and returns the perspective transformation \(H\) between the source and the
  10463. destination planes:
  10464. \(s_i \vecthree{x'_i}{y'_i}{1} \sim H \vecthree{x_i}{y_i}{1}\)
  10465. so that the back-projection error
  10466. \(\sum _i \left ( x'_i- \frac{h_{11} x_i + h_{12} y_i + h_{13}}{h_{31} x_i + h_{32} y_i + h_{33}} \right )^2+ \left ( y'_i- \frac{h_{21} x_i + h_{22} y_i + h_{23}}{h_{31} x_i + h_{32} y_i + h_{33}} \right )^2\)
  10467. is minimized. If the parameter method is set to the default value 0, the function uses all the point
  10468. pairs to compute an initial homography estimate with a simple least-squares scheme.
  10469. However, if not all of the point pairs ( \(srcPoints_i\), \(dstPoints_i\) ) fit the rigid perspective
  10470. transformation (that is, there are some outliers), this initial estimate will be poor. In this case,
  10471. you can use one of the three robust methods. The methods RANSAC, LMeDS and RHO try many different
  10472. random subsets of the corresponding point pairs (of four pairs each, collinear pairs are discarded), estimate the homography matrix
  10473. using this subset and a simple least-squares algorithm, and then compute the quality/goodness of the
  10474. computed homography (which is the number of inliers for RANSAC or the least median re-projection error for
  10475. LMeDS). The best subset is then used to produce the initial estimate of the homography matrix and
  10476. the mask of inliers/outliers.
  10477. Regardless of the method, robust or not, the computed homography matrix is refined further (using
  10478. inliers only in case of a robust method) with the Levenberg-Marquardt method to reduce the
  10479. re-projection error even more.
  10480. The methods RANSAC and RHO can handle practically any ratio of outliers but need a threshold to
  10481. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  10482. correctly only when there are more than 50% of inliers. Finally, if there are no outliers and the
  10483. noise is rather small, use the default method (method=0).
  10484. The function is used to find initial intrinsic and extrinsic matrices. Homography matrix is
  10485. determined up to a scale. Thus, it is normalized so that \(h_{33}=1\). Note that whenever an \(H\) matrix
  10486. cannot be estimated, an empty one will be returned.
  10487. SEE:
  10488. getAffineTransform, estimateAffine2D, estimateAffinePartial2D, getPerspectiveTransform, warpPerspective,
  10489. perspectiveTransform</dd>
  10490. <dt><span class="returnLabel">Returns:</span></dt>
  10491. <dd>automatically generated</dd>
  10492. </dl>
  10493. </li>
  10494. </ul>
  10495. <a name="findHomography-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-">
  10496. <!-- -->
  10497. </a>
  10498. <ul class="blockList">
  10499. <li class="blockList">
  10500. <h4>findHomography</h4>
  10501. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findHomography(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;srcPoints,
  10502. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dstPoints,
  10503. int&nbsp;method,
  10504. double&nbsp;ransacReprojThreshold)</pre>
  10505. <div class="block">Finds a perspective transformation between two planes.</div>
  10506. <dl>
  10507. <dt><span class="paramLabel">Parameters:</span></dt>
  10508. <dd><code>srcPoints</code> - Coordinates of the points in the original plane, a matrix of the type CV_32FC2
  10509. or vector&lt;Point2f&gt; .</dd>
  10510. <dd><code>dstPoints</code> - Coordinates of the points in the target plane, a matrix of the type CV_32FC2 or
  10511. a vector&lt;Point2f&gt; .</dd>
  10512. <dd><code>method</code> - Method used to compute a homography matrix. The following methods are possible:
  10513. <ul>
  10514. <li>
  10515. <b>0</b> - a regular method using all the points, i.e., the least squares method
  10516. </li>
  10517. <li>
  10518. REF: RANSAC - RANSAC-based robust method
  10519. </li>
  10520. <li>
  10521. REF: LMEDS - Least-Median robust method
  10522. </li>
  10523. <li>
  10524. REF: RHO - PROSAC-based robust method
  10525. </li>
  10526. </ul></dd>
  10527. <dd><code>ransacReprojThreshold</code> - Maximum allowed reprojection error to treat a point pair as an inlier
  10528. (used in the RANSAC and RHO methods only). That is, if
  10529. \(\| \texttt{dstPoints} _i - \texttt{convertPointsHomogeneous} ( \texttt{H} \cdot \texttt{srcPoints} _i) \|_2 &gt; \texttt{ransacReprojThreshold}\)
  10530. then the point \(i\) is considered as an outlier. If srcPoints and dstPoints are measured in pixels,
  10531. it usually makes sense to set this parameter somewhere in the range of 1 to 10.
  10532. mask values are ignored.
  10533. The function finds and returns the perspective transformation \(H\) between the source and the
  10534. destination planes:
  10535. \(s_i \vecthree{x'_i}{y'_i}{1} \sim H \vecthree{x_i}{y_i}{1}\)
  10536. so that the back-projection error
  10537. \(\sum _i \left ( x'_i- \frac{h_{11} x_i + h_{12} y_i + h_{13}}{h_{31} x_i + h_{32} y_i + h_{33}} \right )^2+ \left ( y'_i- \frac{h_{21} x_i + h_{22} y_i + h_{23}}{h_{31} x_i + h_{32} y_i + h_{33}} \right )^2\)
  10538. is minimized. If the parameter method is set to the default value 0, the function uses all the point
  10539. pairs to compute an initial homography estimate with a simple least-squares scheme.
  10540. However, if not all of the point pairs ( \(srcPoints_i\), \(dstPoints_i\) ) fit the rigid perspective
  10541. transformation (that is, there are some outliers), this initial estimate will be poor. In this case,
  10542. you can use one of the three robust methods. The methods RANSAC, LMeDS and RHO try many different
  10543. random subsets of the corresponding point pairs (of four pairs each, collinear pairs are discarded), estimate the homography matrix
  10544. using this subset and a simple least-squares algorithm, and then compute the quality/goodness of the
  10545. computed homography (which is the number of inliers for RANSAC or the least median re-projection error for
  10546. LMeDS). The best subset is then used to produce the initial estimate of the homography matrix and
  10547. the mask of inliers/outliers.
  10548. Regardless of the method, robust or not, the computed homography matrix is refined further (using
  10549. inliers only in case of a robust method) with the Levenberg-Marquardt method to reduce the
  10550. re-projection error even more.
  10551. The methods RANSAC and RHO can handle practically any ratio of outliers but need a threshold to
  10552. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  10553. correctly only when there are more than 50% of inliers. Finally, if there are no outliers and the
  10554. noise is rather small, use the default method (method=0).
  10555. The function is used to find initial intrinsic and extrinsic matrices. Homography matrix is
  10556. determined up to a scale. Thus, it is normalized so that \(h_{33}=1\). Note that whenever an \(H\) matrix
  10557. cannot be estimated, an empty one will be returned.
  10558. SEE:
  10559. getAffineTransform, estimateAffine2D, estimateAffinePartial2D, getPerspectiveTransform, warpPerspective,
  10560. perspectiveTransform</dd>
  10561. <dt><span class="returnLabel">Returns:</span></dt>
  10562. <dd>automatically generated</dd>
  10563. </dl>
  10564. </li>
  10565. </ul>
  10566. <a name="findHomography-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-org.opencv.core.Mat-">
  10567. <!-- -->
  10568. </a>
  10569. <ul class="blockList">
  10570. <li class="blockList">
  10571. <h4>findHomography</h4>
  10572. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findHomography(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;srcPoints,
  10573. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dstPoints,
  10574. int&nbsp;method,
  10575. double&nbsp;ransacReprojThreshold,
  10576. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</pre>
  10577. <div class="block">Finds a perspective transformation between two planes.</div>
  10578. <dl>
  10579. <dt><span class="paramLabel">Parameters:</span></dt>
  10580. <dd><code>srcPoints</code> - Coordinates of the points in the original plane, a matrix of the type CV_32FC2
  10581. or vector&lt;Point2f&gt; .</dd>
  10582. <dd><code>dstPoints</code> - Coordinates of the points in the target plane, a matrix of the type CV_32FC2 or
  10583. a vector&lt;Point2f&gt; .</dd>
  10584. <dd><code>method</code> - Method used to compute a homography matrix. The following methods are possible:
  10585. <ul>
  10586. <li>
  10587. <b>0</b> - a regular method using all the points, i.e., the least squares method
  10588. </li>
  10589. <li>
  10590. REF: RANSAC - RANSAC-based robust method
  10591. </li>
  10592. <li>
  10593. REF: LMEDS - Least-Median robust method
  10594. </li>
  10595. <li>
  10596. REF: RHO - PROSAC-based robust method
  10597. </li>
  10598. </ul></dd>
  10599. <dd><code>ransacReprojThreshold</code> - Maximum allowed reprojection error to treat a point pair as an inlier
  10600. (used in the RANSAC and RHO methods only). That is, if
  10601. \(\| \texttt{dstPoints} _i - \texttt{convertPointsHomogeneous} ( \texttt{H} \cdot \texttt{srcPoints} _i) \|_2 &gt; \texttt{ransacReprojThreshold}\)
  10602. then the point \(i\) is considered as an outlier. If srcPoints and dstPoints are measured in pixels,
  10603. it usually makes sense to set this parameter somewhere in the range of 1 to 10.</dd>
  10604. <dd><code>mask</code> - Optional output mask set by a robust method ( RANSAC or LMeDS ). Note that the input
  10605. mask values are ignored.
  10606. The function finds and returns the perspective transformation \(H\) between the source and the
  10607. destination planes:
  10608. \(s_i \vecthree{x'_i}{y'_i}{1} \sim H \vecthree{x_i}{y_i}{1}\)
  10609. so that the back-projection error
  10610. \(\sum _i \left ( x'_i- \frac{h_{11} x_i + h_{12} y_i + h_{13}}{h_{31} x_i + h_{32} y_i + h_{33}} \right )^2+ \left ( y'_i- \frac{h_{21} x_i + h_{22} y_i + h_{23}}{h_{31} x_i + h_{32} y_i + h_{33}} \right )^2\)
  10611. is minimized. If the parameter method is set to the default value 0, the function uses all the point
  10612. pairs to compute an initial homography estimate with a simple least-squares scheme.
  10613. However, if not all of the point pairs ( \(srcPoints_i\), \(dstPoints_i\) ) fit the rigid perspective
  10614. transformation (that is, there are some outliers), this initial estimate will be poor. In this case,
  10615. you can use one of the three robust methods. The methods RANSAC, LMeDS and RHO try many different
  10616. random subsets of the corresponding point pairs (of four pairs each, collinear pairs are discarded), estimate the homography matrix
  10617. using this subset and a simple least-squares algorithm, and then compute the quality/goodness of the
  10618. computed homography (which is the number of inliers for RANSAC or the least median re-projection error for
  10619. LMeDS). The best subset is then used to produce the initial estimate of the homography matrix and
  10620. the mask of inliers/outliers.
  10621. Regardless of the method, robust or not, the computed homography matrix is refined further (using
  10622. inliers only in case of a robust method) with the Levenberg-Marquardt method to reduce the
  10623. re-projection error even more.
  10624. The methods RANSAC and RHO can handle practically any ratio of outliers but need a threshold to
  10625. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  10626. correctly only when there are more than 50% of inliers. Finally, if there are no outliers and the
  10627. noise is rather small, use the default method (method=0).
  10628. The function is used to find initial intrinsic and extrinsic matrices. Homography matrix is
  10629. determined up to a scale. Thus, it is normalized so that \(h_{33}=1\). Note that whenever an \(H\) matrix
  10630. cannot be estimated, an empty one will be returned.
  10631. SEE:
  10632. getAffineTransform, estimateAffine2D, estimateAffinePartial2D, getPerspectiveTransform, warpPerspective,
  10633. perspectiveTransform</dd>
  10634. <dt><span class="returnLabel">Returns:</span></dt>
  10635. <dd>automatically generated</dd>
  10636. </dl>
  10637. </li>
  10638. </ul>
  10639. <a name="findHomography-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-org.opencv.core.Mat-int-">
  10640. <!-- -->
  10641. </a>
  10642. <ul class="blockList">
  10643. <li class="blockList">
  10644. <h4>findHomography</h4>
  10645. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findHomography(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;srcPoints,
  10646. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dstPoints,
  10647. int&nbsp;method,
  10648. double&nbsp;ransacReprojThreshold,
  10649. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask,
  10650. int&nbsp;maxIters)</pre>
  10651. <div class="block">Finds a perspective transformation between two planes.</div>
  10652. <dl>
  10653. <dt><span class="paramLabel">Parameters:</span></dt>
  10654. <dd><code>srcPoints</code> - Coordinates of the points in the original plane, a matrix of the type CV_32FC2
  10655. or vector&lt;Point2f&gt; .</dd>
  10656. <dd><code>dstPoints</code> - Coordinates of the points in the target plane, a matrix of the type CV_32FC2 or
  10657. a vector&lt;Point2f&gt; .</dd>
  10658. <dd><code>method</code> - Method used to compute a homography matrix. The following methods are possible:
  10659. <ul>
  10660. <li>
  10661. <b>0</b> - a regular method using all the points, i.e., the least squares method
  10662. </li>
  10663. <li>
  10664. REF: RANSAC - RANSAC-based robust method
  10665. </li>
  10666. <li>
  10667. REF: LMEDS - Least-Median robust method
  10668. </li>
  10669. <li>
  10670. REF: RHO - PROSAC-based robust method
  10671. </li>
  10672. </ul></dd>
  10673. <dd><code>ransacReprojThreshold</code> - Maximum allowed reprojection error to treat a point pair as an inlier
  10674. (used in the RANSAC and RHO methods only). That is, if
  10675. \(\| \texttt{dstPoints} _i - \texttt{convertPointsHomogeneous} ( \texttt{H} \cdot \texttt{srcPoints} _i) \|_2 &gt; \texttt{ransacReprojThreshold}\)
  10676. then the point \(i\) is considered as an outlier. If srcPoints and dstPoints are measured in pixels,
  10677. it usually makes sense to set this parameter somewhere in the range of 1 to 10.</dd>
  10678. <dd><code>mask</code> - Optional output mask set by a robust method ( RANSAC or LMeDS ). Note that the input
  10679. mask values are ignored.</dd>
  10680. <dd><code>maxIters</code> - The maximum number of RANSAC iterations.
  10681. The function finds and returns the perspective transformation \(H\) between the source and the
  10682. destination planes:
  10683. \(s_i \vecthree{x'_i}{y'_i}{1} \sim H \vecthree{x_i}{y_i}{1}\)
  10684. so that the back-projection error
  10685. \(\sum _i \left ( x'_i- \frac{h_{11} x_i + h_{12} y_i + h_{13}}{h_{31} x_i + h_{32} y_i + h_{33}} \right )^2+ \left ( y'_i- \frac{h_{21} x_i + h_{22} y_i + h_{23}}{h_{31} x_i + h_{32} y_i + h_{33}} \right )^2\)
  10686. is minimized. If the parameter method is set to the default value 0, the function uses all the point
  10687. pairs to compute an initial homography estimate with a simple least-squares scheme.
  10688. However, if not all of the point pairs ( \(srcPoints_i\), \(dstPoints_i\) ) fit the rigid perspective
  10689. transformation (that is, there are some outliers), this initial estimate will be poor. In this case,
  10690. you can use one of the three robust methods. The methods RANSAC, LMeDS and RHO try many different
  10691. random subsets of the corresponding point pairs (of four pairs each, collinear pairs are discarded), estimate the homography matrix
  10692. using this subset and a simple least-squares algorithm, and then compute the quality/goodness of the
  10693. computed homography (which is the number of inliers for RANSAC or the least median re-projection error for
  10694. LMeDS). The best subset is then used to produce the initial estimate of the homography matrix and
  10695. the mask of inliers/outliers.
  10696. Regardless of the method, robust or not, the computed homography matrix is refined further (using
  10697. inliers only in case of a robust method) with the Levenberg-Marquardt method to reduce the
  10698. re-projection error even more.
  10699. The methods RANSAC and RHO can handle practically any ratio of outliers but need a threshold to
  10700. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  10701. correctly only when there are more than 50% of inliers. Finally, if there are no outliers and the
  10702. noise is rather small, use the default method (method=0).
  10703. The function is used to find initial intrinsic and extrinsic matrices. Homography matrix is
  10704. determined up to a scale. Thus, it is normalized so that \(h_{33}=1\). Note that whenever an \(H\) matrix
  10705. cannot be estimated, an empty one will be returned.
  10706. SEE:
  10707. getAffineTransform, estimateAffine2D, estimateAffinePartial2D, getPerspectiveTransform, warpPerspective,
  10708. perspectiveTransform</dd>
  10709. <dt><span class="returnLabel">Returns:</span></dt>
  10710. <dd>automatically generated</dd>
  10711. </dl>
  10712. </li>
  10713. </ul>
  10714. <a name="findHomography-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-int-double-org.opencv.core.Mat-int-double-">
  10715. <!-- -->
  10716. </a>
  10717. <ul class="blockList">
  10718. <li class="blockList">
  10719. <h4>findHomography</h4>
  10720. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findHomography(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;srcPoints,
  10721. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dstPoints,
  10722. int&nbsp;method,
  10723. double&nbsp;ransacReprojThreshold,
  10724. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask,
  10725. int&nbsp;maxIters,
  10726. double&nbsp;confidence)</pre>
  10727. <div class="block">Finds a perspective transformation between two planes.</div>
  10728. <dl>
  10729. <dt><span class="paramLabel">Parameters:</span></dt>
  10730. <dd><code>srcPoints</code> - Coordinates of the points in the original plane, a matrix of the type CV_32FC2
  10731. or vector&lt;Point2f&gt; .</dd>
  10732. <dd><code>dstPoints</code> - Coordinates of the points in the target plane, a matrix of the type CV_32FC2 or
  10733. a vector&lt;Point2f&gt; .</dd>
  10734. <dd><code>method</code> - Method used to compute a homography matrix. The following methods are possible:
  10735. <ul>
  10736. <li>
  10737. <b>0</b> - a regular method using all the points, i.e., the least squares method
  10738. </li>
  10739. <li>
  10740. REF: RANSAC - RANSAC-based robust method
  10741. </li>
  10742. <li>
  10743. REF: LMEDS - Least-Median robust method
  10744. </li>
  10745. <li>
  10746. REF: RHO - PROSAC-based robust method
  10747. </li>
  10748. </ul></dd>
  10749. <dd><code>ransacReprojThreshold</code> - Maximum allowed reprojection error to treat a point pair as an inlier
  10750. (used in the RANSAC and RHO methods only). That is, if
  10751. \(\| \texttt{dstPoints} _i - \texttt{convertPointsHomogeneous} ( \texttt{H} \cdot \texttt{srcPoints} _i) \|_2 &gt; \texttt{ransacReprojThreshold}\)
  10752. then the point \(i\) is considered as an outlier. If srcPoints and dstPoints are measured in pixels,
  10753. it usually makes sense to set this parameter somewhere in the range of 1 to 10.</dd>
  10754. <dd><code>mask</code> - Optional output mask set by a robust method ( RANSAC or LMeDS ). Note that the input
  10755. mask values are ignored.</dd>
  10756. <dd><code>maxIters</code> - The maximum number of RANSAC iterations.</dd>
  10757. <dd><code>confidence</code> - Confidence level, between 0 and 1.
  10758. The function finds and returns the perspective transformation \(H\) between the source and the
  10759. destination planes:
  10760. \(s_i \vecthree{x'_i}{y'_i}{1} \sim H \vecthree{x_i}{y_i}{1}\)
  10761. so that the back-projection error
  10762. \(\sum _i \left ( x'_i- \frac{h_{11} x_i + h_{12} y_i + h_{13}}{h_{31} x_i + h_{32} y_i + h_{33}} \right )^2+ \left ( y'_i- \frac{h_{21} x_i + h_{22} y_i + h_{23}}{h_{31} x_i + h_{32} y_i + h_{33}} \right )^2\)
  10763. is minimized. If the parameter method is set to the default value 0, the function uses all the point
  10764. pairs to compute an initial homography estimate with a simple least-squares scheme.
  10765. However, if not all of the point pairs ( \(srcPoints_i\), \(dstPoints_i\) ) fit the rigid perspective
  10766. transformation (that is, there are some outliers), this initial estimate will be poor. In this case,
  10767. you can use one of the three robust methods. The methods RANSAC, LMeDS and RHO try many different
  10768. random subsets of the corresponding point pairs (of four pairs each, collinear pairs are discarded), estimate the homography matrix
  10769. using this subset and a simple least-squares algorithm, and then compute the quality/goodness of the
  10770. computed homography (which is the number of inliers for RANSAC or the least median re-projection error for
  10771. LMeDS). The best subset is then used to produce the initial estimate of the homography matrix and
  10772. the mask of inliers/outliers.
  10773. Regardless of the method, robust or not, the computed homography matrix is refined further (using
  10774. inliers only in case of a robust method) with the Levenberg-Marquardt method to reduce the
  10775. re-projection error even more.
  10776. The methods RANSAC and RHO can handle practically any ratio of outliers but need a threshold to
  10777. distinguish inliers from outliers. The method LMeDS does not need any threshold but it works
  10778. correctly only when there are more than 50% of inliers. Finally, if there are no outliers and the
  10779. noise is rather small, use the default method (method=0).
  10780. The function is used to find initial intrinsic and extrinsic matrices. Homography matrix is
  10781. determined up to a scale. Thus, it is normalized so that \(h_{33}=1\). Note that whenever an \(H\) matrix
  10782. cannot be estimated, an empty one will be returned.
  10783. SEE:
  10784. getAffineTransform, estimateAffine2D, estimateAffinePartial2D, getPerspectiveTransform, warpPerspective,
  10785. perspectiveTransform</dd>
  10786. <dt><span class="returnLabel">Returns:</span></dt>
  10787. <dd>automatically generated</dd>
  10788. </dl>
  10789. </li>
  10790. </ul>
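<div class="block">A hedged sketch of the overload above with RANSAC and the inlier mask (the helper and class names are assumptions for illustration; the matched points are assumed to come from a feature matcher):</div>
<pre><code>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;

public class RansacHomographySketch {
    // srcPts and dstPts are assumed to hold matched points, e.g. from a feature matcher.
    static Mat estimate(MatOfPoint2f srcPts, MatOfPoint2f dstPts) {
        Mat inlierMask = new Mat(); // one entry per correspondence: 1 = inlier, 0 = outlier
        Mat H = Calib3d.findHomography(srcPts, dstPts, Calib3d.RANSAC,
                3.0,         // ransacReprojThreshold in pixels
                inlierMask,
                2000,        // maxIters
                0.995);      // confidence
        int inliers = Core.countNonZero(inlierMask);
        System.out.println(inliers + " of " + srcPts.rows() + " matches are inliers");
        return H; // empty if no homography could be estimated
    }
}
</code></pre>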
  10791. <a name="findHomography-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.calib3d.UsacParams-">
  10792. <!-- -->
  10793. </a>
  10794. <ul class="blockList">
  10795. <li class="blockList">
  10796. <h4>findHomography</h4>
  10797. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;findHomography(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;srcPoints,
  10798. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dstPoints,
  10799. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask,
  10800. <a href="../../../org/opencv/calib3d/UsacParams.html" title="class in org.opencv.calib3d">UsacParams</a>&nbsp;params)</pre>
  10801. </li>
  10802. </ul>
  10803. <a name="fisheye_calibrate-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-">
  10804. <!-- -->
  10805. </a>
  10806. <ul class="blockList">
  10807. <li class="blockList">
  10808. <h4>fisheye_calibrate</h4>
  10809. <pre>public static&nbsp;double&nbsp;fisheye_calibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  10810. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  10811. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;image_size,
  10812. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  10813. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  10814. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  10815. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs)</pre>
  10816. <div class="block">Performs camera calibration</div>
  10817. <dl>
  10818. <dt><span class="paramLabel">Parameters:</span></dt>
  10819. <dd><code>objectPoints</code> - vector of vectors of calibration pattern points in the calibration pattern
  10820. coordinate space.</dd>
  10821. <dd><code>imagePoints</code> - vector of vectors of the projections of calibration pattern points.
  10822. imagePoints.size() and objectPoints.size() and imagePoints[i].size() must be equal to
  10823. objectPoints[i].size() for each i.</dd>
  10824. <dd><code>image_size</code> - Size of the image used only to initialize the camera intrinsic matrix.</dd>
  10825. <dd><code>K</code> - Output 3x3 floating-point camera intrinsic matrix
  10826. \(\cameramatrix{A}\) . If
  10827. REF: fisheye::CALIB_USE_INTRINSIC_GUESS is specified, some or all of fx, fy, cx, cy must be
  10828. initialized before calling the function.</dd>
  10829. <dd><code>D</code> - Output vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
  10830. <dd><code>rvecs</code> - Output vector of rotation vectors (see Rodrigues ) estimated for each pattern view.
  10831. That is, each k-th rotation vector together with the corresponding k-th translation vector (see
  10832. the next output parameter description) brings the calibration pattern from the model coordinate
  10833. space (in which object points are specified) to the world coordinate space, that is, a real
  10834. position of the calibration pattern in the k-th pattern view (k=0.. *M* -1).</dd>
10835. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view. The calibration flags listed below are accepted by the overloads of this function that take a <code>flags</code> argument:
  10836. <ul>
  10837. <li>
  10838. REF: fisheye::CALIB_USE_INTRINSIC_GUESS cameraMatrix contains valid initial values of
  10839. fx, fy, cx, cy that are optimized further. Otherwise, (cx, cy) is initially set to the image
  10840. center ( imageSize is used), and focal distances are computed in a least-squares fashion.
  10841. </li>
  10842. <li>
10843. REF: fisheye::CALIB_RECOMPUTE_EXTRINSIC Extrinsics will be recomputed after each iteration
10844. of the intrinsic optimization.
10845. </li>
10846. <li>
10847. REF: fisheye::CALIB_CHECK_COND The function will check the validity of the condition number.
10848. </li>
10849. <li>
10850. REF: fisheye::CALIB_FIX_SKEW The skew coefficient (alpha) is set to zero and stays zero.
10851. </li>
10852. <li>
10853. REF: fisheye::CALIB_FIX_K1,..., REF: fisheye::CALIB_FIX_K4 The selected distortion coefficients
10854. are set to zero and stay zero.
  10855. </li>
  10856. <li>
  10857. REF: fisheye::CALIB_FIX_PRINCIPAL_POINT The principal point is not changed during the global
  10858. optimization. It stays at the center or at a different location specified when REF: fisheye::CALIB_USE_INTRINSIC_GUESS is set too.
  10859. </li>
  10860. <li>
  10861. REF: fisheye::CALIB_FIX_FOCAL_LENGTH The focal length is not changed during the global
  10862. optimization. It is the \(max(width,height)/\pi\) or the provided \(f_x\), \(f_y\) when REF: fisheye::CALIB_USE_INTRINSIC_GUESS is set too.
  10863. </li>
  10864. </ul></dd>
  10865. <dt><span class="returnLabel">Returns:</span></dt>
  10866. <dd>automatically generated</dd>
  10867. </dl>
  10868. </li>
  10869. </ul>
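<div class="block">A minimal Java sketch of the overload above (illustrative assumptions: the object/image point lists are produced elsewhere, e.g. from detected chessboard corners, with one Mat per calibration view, and the helper name is hypothetical):</div>
<pre><code>
import java.util.ArrayList;
import java.util.List;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;

public class FisheyeCalibrateSketch {
    // objectPoints and imagePoints are assumed to be filled beforehand with one
    // Mat per view, where imagePoints[i] has as many points as objectPoints[i].
    static void calibrate(List&lt;Mat&gt; objectPoints, List&lt;Mat&gt; imagePoints, Size imageSize) {
        Mat K = Mat.eye(3, 3, CvType.CV_64F);       // output intrinsic matrix
        Mat D = Mat.zeros(4, 1, CvType.CV_64F);     // output fisheye distortion coefficients
        List&lt;Mat&gt; rvecs = new ArrayList&lt;&gt;(); // per-view rotation vectors
        List&lt;Mat&gt; tvecs = new ArrayList&lt;&gt;(); // per-view translation vectors

        double rms = Calib3d.fisheye_calibrate(objectPoints, imagePoints, imageSize,
                K, D, rvecs, tvecs);
        System.out.println("RMS reprojection error: " + rms);
        System.out.println("K = " + K.dump());
        System.out.println("D = " + D.dump());
        // The overloads documented below additionally take an int flags bitmask
        // (combinations of the CALIB_* flags described above) and a TermCriteria.
    }
}
</code></pre>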
  10870. <a name="fisheye_calibrate-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-int-">
  10871. <!-- -->
  10872. </a>
  10873. <ul class="blockList">
  10874. <li class="blockList">
  10875. <h4>fisheye_calibrate</h4>
  10876. <pre>public static&nbsp;double&nbsp;fisheye_calibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  10877. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  10878. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;image_size,
  10879. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  10880. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  10881. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  10882. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  10883. int&nbsp;flags)</pre>
  10884. <div class="block">Performs camera calibration</div>
  10885. <dl>
  10886. <dt><span class="paramLabel">Parameters:</span></dt>
  10887. <dd><code>objectPoints</code> - vector of vectors of calibration pattern points in the calibration pattern
  10888. coordinate space.</dd>
  10889. <dd><code>imagePoints</code> - vector of vectors of the projections of calibration pattern points.
  10890. imagePoints.size() and objectPoints.size() and imagePoints[i].size() must be equal to
  10891. objectPoints[i].size() for each i.</dd>
  10892. <dd><code>image_size</code> - Size of the image used only to initialize the camera intrinsic matrix.</dd>
  10893. <dd><code>K</code> - Output 3x3 floating-point camera intrinsic matrix
  10894. \(\cameramatrix{A}\) . If
  10895. REF: fisheye::CALIB_USE_INTRINSIC_GUESS is specified, some or all of fx, fy, cx, cy must be
  10896. initialized before calling the function.</dd>
  10897. <dd><code>D</code> - Output vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
  10898. <dd><code>rvecs</code> - Output vector of rotation vectors (see Rodrigues ) estimated for each pattern view.
  10899. That is, each k-th rotation vector together with the corresponding k-th translation vector (see
  10900. the next output parameter description) brings the calibration pattern from the model coordinate
  10901. space (in which object points are specified) to the world coordinate space, that is, a real
  10902. position of the calibration pattern in the k-th pattern view (k=0.. *M* -1).</dd>
  10903. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view.</dd>
  10904. <dd><code>flags</code> - Different flags that may be zero or a combination of the following values:
  10905. <ul>
  10906. <li>
  10907. REF: fisheye::CALIB_USE_INTRINSIC_GUESS cameraMatrix contains valid initial values of
  10908. fx, fy, cx, cy that are optimized further. Otherwise, (cx, cy) is initially set to the image
  10909. center ( imageSize is used), and focal distances are computed in a least-squares fashion.
  10910. </li>
  10911. <li>
10912. REF: fisheye::CALIB_RECOMPUTE_EXTRINSIC Extrinsics will be recomputed after each iteration
10913. of the intrinsic optimization.
10914. </li>
10915. <li>
10916. REF: fisheye::CALIB_CHECK_COND The function will check the validity of the condition number.
10917. </li>
10918. <li>
10919. REF: fisheye::CALIB_FIX_SKEW The skew coefficient (alpha) is set to zero and stays zero.
10920. </li>
10921. <li>
10922. REF: fisheye::CALIB_FIX_K1,..., REF: fisheye::CALIB_FIX_K4 The selected distortion coefficients
10923. are set to zero and stay zero.
  10924. </li>
  10925. <li>
  10926. REF: fisheye::CALIB_FIX_PRINCIPAL_POINT The principal point is not changed during the global
  10927. optimization. It stays at the center or at a different location specified when REF: fisheye::CALIB_USE_INTRINSIC_GUESS is set too.
  10928. </li>
  10929. <li>
  10930. REF: fisheye::CALIB_FIX_FOCAL_LENGTH The focal length is not changed during the global
  10931. optimization. It is the \(max(width,height)/\pi\) or the provided \(f_x\), \(f_y\) when REF: fisheye::CALIB_USE_INTRINSIC_GUESS is set too.
  10932. </li>
  10933. </ul></dd>
  10934. <dt><span class="returnLabel">Returns:</span></dt>
  10935. <dd>automatically generated</dd>
  10936. </dl>
  10937. </li>
  10938. </ul>
  10939. <a name="fisheye_calibrate-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-int-org.opencv.core.TermCriteria-">
  10940. <!-- -->
  10941. </a>
  10942. <ul class="blockList">
  10943. <li class="blockList">
  10944. <h4>fisheye_calibrate</h4>
  10945. <pre>public static&nbsp;double&nbsp;fisheye_calibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  10946. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints,
  10947. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;image_size,
  10948. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  10949. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  10950. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  10951. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  10952. int&nbsp;flags,
  10953. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</pre>
  10954. <div class="block">Performs camera calibration</div>
  10955. <dl>
  10956. <dt><span class="paramLabel">Parameters:</span></dt>
  10957. <dd><code>objectPoints</code> - vector of vectors of calibration pattern points in the calibration pattern
  10958. coordinate space.</dd>
10959. <dd><code>imagePoints</code> - vector of vectors of the projections of calibration pattern points.
10960. imagePoints.size() must be equal to objectPoints.size(), and imagePoints[i].size() must be equal to
10961. objectPoints[i].size() for each i.</dd>
  10962. <dd><code>image_size</code> - Size of the image used only to initialize the camera intrinsic matrix.</dd>
  10963. <dd><code>K</code> - Output 3x3 floating-point camera intrinsic matrix
  10964. \(\cameramatrix{A}\) . If
  10965. REF: fisheye::CALIB_USE_INTRINSIC_GUESS is specified, some or all of fx, fy, cx, cy must be
  10966. initialized before calling the function.</dd>
  10967. <dd><code>D</code> - Output vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
  10968. <dd><code>rvecs</code> - Output vector of rotation vectors (see Rodrigues ) estimated for each pattern view.
  10969. That is, each k-th rotation vector together with the corresponding k-th translation vector (see
  10970. the next output parameter description) brings the calibration pattern from the model coordinate
  10971. space (in which object points are specified) to the world coordinate space, that is, a real
10972. position of the calibration pattern in the k-th pattern view (k = 0..M-1).</dd>
  10973. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view.</dd>
  10974. <dd><code>flags</code> - Different flags that may be zero or a combination of the following values:
  10975. <ul>
  10976. <li>
  10977. REF: fisheye::CALIB_USE_INTRINSIC_GUESS cameraMatrix contains valid initial values of
  10978. fx, fy, cx, cy that are optimized further. Otherwise, (cx, cy) is initially set to the image
  10979. center ( imageSize is used), and focal distances are computed in a least-squares fashion.
  10980. </li>
  10981. <li>
10982. REF: fisheye::CALIB_RECOMPUTE_EXTRINSIC Extrinsic parameters will be recomputed after each iteration
10983. of intrinsic optimization.
10984. </li>
10985. <li>
10986. REF: fisheye::CALIB_CHECK_COND The functions will check the validity of the condition number.
10987. </li>
10988. <li>
10989. REF: fisheye::CALIB_FIX_SKEW The skew coefficient (alpha) is set to zero and stays zero.
10990. </li>
10991. <li>
10992. REF: fisheye::CALIB_FIX_K1,..., REF: fisheye::CALIB_FIX_K4 Selected distortion coefficients
10993. are set to zero and stay zero.
10994. </li>
10995. <li>
10996. REF: fisheye::CALIB_FIX_PRINCIPAL_POINT The principal point is not changed during the global
10997. optimization. It stays at the center, or at the different location specified when REF: fisheye::CALIB_USE_INTRINSIC_GUESS is also set.
10998. </li>
10999. <li>
11000. REF: fisheye::CALIB_FIX_FOCAL_LENGTH The focal length is not changed during the global
11001. optimization. It is \(\max(width,height)/\pi\), or the provided \(f_x\), \(f_y\) when REF: fisheye::CALIB_USE_INTRINSIC_GUESS is also set.
  11002. </li>
  11003. </ul></dd>
  11004. <dd><code>criteria</code> - Termination criteria for the iterative optimization algorithm.</dd>
  11005. <dt><span class="returnLabel">Returns:</span></dt>
  11006. <dd>automatically generated</dd>
  11007. </dl>
  11008. </li>
  11009. </ul>
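<div class="block">Usage sketch for this overload (not part of the generated reference; the image size,
flag choice, and point-list preparation are illustrative assumptions):</div>
<pre>
import java.util.ArrayList;
import java.util.List;
import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;

// One Mat per view, assumed to be filled beforehand:
// object points as 1xN CV_32FC3 (pattern coordinates), image points as 1xN CV_32FC2 (detected corners).
List&lt;Mat&gt; objectPointsList = new ArrayList&lt;&gt;();
List&lt;Mat&gt; imagePointsList  = new ArrayList&lt;&gt;();

Mat K = new Mat();                        // output 3x3 camera intrinsic matrix
Mat D = new Mat();                        // output fisheye distortion coefficients (k1..k4)
List&lt;Mat&gt; rvecs = new ArrayList&lt;&gt;();  // per-view rotation vectors
List&lt;Mat&gt; tvecs = new ArrayList&lt;&gt;();  // per-view translation vectors

int flags = Calib3d.fisheye_CALIB_RECOMPUTE_EXTRINSIC | Calib3d.fisheye_CALIB_FIX_SKEW;
TermCriteria criteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 100, 1e-6);

double rms = Calib3d.fisheye_calibrate(objectPointsList, imagePointsList,
        new Size(1280, 800), K, D, rvecs, tvecs, flags, criteria);
System.out.println("calibration returned: " + rms);   // commonly interpreted as the RMS re-projection error
</pre>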
  11010. <a name="fisheye_distortPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  11011. <!-- -->
  11012. </a>
  11013. <ul class="blockList">
  11014. <li class="blockList">
  11015. <h4>fisheye_distortPoints</h4>
  11016. <pre>public static&nbsp;void&nbsp;fisheye_distortPoints(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  11017. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  11018. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11019. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D)</pre>
  11020. <div class="block">Distorts 2D points using fisheye model.</div>
  11021. <dl>
  11022. <dt><span class="paramLabel">Parameters:</span></dt>
  11023. <dd><code>undistorted</code> - Array of object points, 1xN/Nx1 2-channel (or vector&lt;Point2f&gt; ), where N is
  11024. the number of points in the view.</dd>
11025. <dd><code>K</code> - Camera intrinsic matrix \(\cameramatrix{K}\).</dd>
11026. <dd><code>D</code> - Input vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
11027. <dd><code>distorted</code> - Output array of image points, 1xN/Nx1 2-channel, or vector&lt;Point2f&gt;.
11028. Note that the function assumes the camera intrinsic matrix of the undistorted points to be the identity matrix.
11029. This means that if you want to distort image points, you have to multiply them by \(K^{-1}\).</dd>
  11030. </dl>
  11031. </li>
  11032. </ul>
  11033. <a name="fisheye_distortPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-">
  11034. <!-- -->
  11035. </a>
  11036. <ul class="blockList">
  11037. <li class="blockList">
  11038. <h4>fisheye_distortPoints</h4>
  11039. <pre>public static&nbsp;void&nbsp;fisheye_distortPoints(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  11040. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  11041. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11042. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  11043. double&nbsp;alpha)</pre>
  11044. <div class="block">Distorts 2D points using fisheye model.</div>
  11045. <dl>
  11046. <dt><span class="paramLabel">Parameters:</span></dt>
  11047. <dd><code>undistorted</code> - Array of object points, 1xN/Nx1 2-channel (or vector&lt;Point2f&gt; ), where N is
  11048. the number of points in the view.</dd>
11049. <dd><code>K</code> - Camera intrinsic matrix \(\cameramatrix{K}\).</dd>
11050. <dd><code>D</code> - Input vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
11051. <dd><code>alpha</code> - The skew coefficient.</dd>
11052. <dd><code>distorted</code> - Output array of image points, 1xN/Nx1 2-channel, or vector&lt;Point2f&gt;.
11053. Note that the function assumes the camera intrinsic matrix of the undistorted points to be the identity matrix.
11054. This means that if you want to distort image points, you have to multiply them by \(K^{-1}\).</dd>
  11055. </dl>
  11056. </li>
  11057. </ul>
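<div class="block">Usage sketch (illustrative, not part of the generated reference): as the note above says,
the input points must first be multiplied by \(K^{-1}\), i.e. converted to normalized coordinates. Here
<code>K</code> and <code>D</code> are assumed to come from <code>fisheye_calibrate</code>, and the pixel
coordinates are arbitrary example values.</div>
<pre>
// Distort a single pixel location (u, v) with the fisheye model.
double fx = K.get(0, 0)[0], fy = K.get(1, 1)[0];
double cx = K.get(0, 2)[0], cy = K.get(1, 2)[0];
double u = 400.0, v = 300.0;                             // example pixel

Mat undistorted = new Mat(1, 1, CvType.CV_64FC2);
undistorted.put(0, 0, (u - cx) / fx, (v - cy) / fy);     // apply K^-1 (normalized coordinates)

Mat distorted = new Mat();
Calib3d.fisheye_distortPoints(undistorted, distorted, K, D);
double[] uvDistorted = distorted.get(0, 0);              // distorted image coordinates
</pre>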
  11058. <a name="fisheye_estimateNewCameraMatrixForUndistortRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-">
  11059. <!-- -->
  11060. </a>
  11061. <ul class="blockList">
  11062. <li class="blockList">
  11063. <h4>fisheye_estimateNewCameraMatrixForUndistortRectify</h4>
  11064. <pre>public static&nbsp;void&nbsp;fisheye_estimateNewCameraMatrixForUndistortRectify(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11065. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  11066. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;image_size,
  11067. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11068. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P)</pre>
  11069. <div class="block">Estimates new camera intrinsic matrix for undistortion or rectification.</div>
  11070. <dl>
  11071. <dt><span class="paramLabel">Parameters:</span></dt>
11072. <dd><code>K</code> - Camera intrinsic matrix \(\cameramatrix{K}\).</dd>
11073. <dd><code>image_size</code> - Size of the image.</dd>
11074. <dd><code>D</code> - Input vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
11075. <dd><code>R</code> - Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
11076. 1-channel or 1x1 3-channel</dd>
11077. <dd><code>P</code> - New camera intrinsic matrix (3x3) or new projection matrix (3x4).</dd>
  11079. </dl>
  11080. </li>
  11081. </ul>
  11082. <a name="fisheye_estimateNewCameraMatrixForUndistortRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-double-">
  11083. <!-- -->
  11084. </a>
  11085. <ul class="blockList">
  11086. <li class="blockList">
  11087. <h4>fisheye_estimateNewCameraMatrixForUndistortRectify</h4>
  11088. <pre>public static&nbsp;void&nbsp;fisheye_estimateNewCameraMatrixForUndistortRectify(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11089. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  11090. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;image_size,
  11091. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11092. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P,
  11093. double&nbsp;balance)</pre>
  11094. <div class="block">Estimates new camera intrinsic matrix for undistortion or rectification.</div>
  11095. <dl>
  11096. <dt><span class="paramLabel">Parameters:</span></dt>
11097. <dd><code>K</code> - Camera intrinsic matrix \(\cameramatrix{K}\).</dd>
11098. <dd><code>image_size</code> - Size of the image.</dd>
  11099. <dd><code>D</code> - Input vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
  11100. <dd><code>R</code> - Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
  11101. 1-channel or 1x1 3-channel</dd>
  11102. <dd><code>P</code> - New camera intrinsic matrix (3x3) or new projection matrix (3x4)</dd>
11103. <dd><code>balance</code> - Sets the new focal length in the range between the minimum and the maximum focal
11104. length. Balance must be in the range [0, 1].</dd>
  11105. </dl>
  11106. </li>
  11107. </ul>
  11108. <a name="fisheye_estimateNewCameraMatrixForUndistortRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Size-">
  11109. <!-- -->
  11110. </a>
  11111. <ul class="blockList">
  11112. <li class="blockList">
  11113. <h4>fisheye_estimateNewCameraMatrixForUndistortRectify</h4>
  11114. <pre>public static&nbsp;void&nbsp;fisheye_estimateNewCameraMatrixForUndistortRectify(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11115. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  11116. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;image_size,
  11117. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11118. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P,
  11119. double&nbsp;balance,
  11120. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;new_size)</pre>
  11121. <div class="block">Estimates new camera intrinsic matrix for undistortion or rectification.</div>
  11122. <dl>
  11123. <dt><span class="paramLabel">Parameters:</span></dt>
11124. <dd><code>K</code> - Camera intrinsic matrix \(\cameramatrix{K}\).</dd>
11125. <dd><code>image_size</code> - Size of the image.</dd>
  11126. <dd><code>D</code> - Input vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
  11127. <dd><code>R</code> - Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
  11128. 1-channel or 1x1 3-channel</dd>
  11129. <dd><code>P</code> - New camera intrinsic matrix (3x3) or new projection matrix (3x4)</dd>
11130. <dd><code>balance</code> - Sets the new focal length in the range between the minimum and the maximum focal
11131. length. Balance must be in the range [0, 1].</dd>
  11132. <dd><code>new_size</code> - the new size</dd>
  11133. </dl>
  11134. </li>
  11135. </ul>
  11136. <a name="fisheye_estimateNewCameraMatrixForUndistortRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Size-double-">
  11137. <!-- -->
  11138. </a>
  11139. <ul class="blockList">
  11140. <li class="blockList">
  11141. <h4>fisheye_estimateNewCameraMatrixForUndistortRectify</h4>
  11142. <pre>public static&nbsp;void&nbsp;fisheye_estimateNewCameraMatrixForUndistortRectify(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11143. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  11144. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;image_size,
  11145. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11146. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P,
  11147. double&nbsp;balance,
  11148. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;new_size,
  11149. double&nbsp;fov_scale)</pre>
  11150. <div class="block">Estimates new camera intrinsic matrix for undistortion or rectification.</div>
  11151. <dl>
  11152. <dt><span class="paramLabel">Parameters:</span></dt>
11153. <dd><code>K</code> - Camera intrinsic matrix \(\cameramatrix{K}\).</dd>
11154. <dd><code>image_size</code> - Size of the image.</dd>
  11155. <dd><code>D</code> - Input vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
  11156. <dd><code>R</code> - Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
  11157. 1-channel or 1x1 3-channel</dd>
  11158. <dd><code>P</code> - New camera intrinsic matrix (3x3) or new projection matrix (3x4)</dd>
11159. <dd><code>balance</code> - Sets the new focal length in the range between the minimum and the maximum focal
11160. length. Balance must be in the range [0, 1].</dd>
  11161. <dd><code>new_size</code> - the new size</dd>
  11162. <dd><code>fov_scale</code> - Divisor for new focal length.</dd>
  11163. </dl>
  11164. </li>
  11165. </ul>
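<div class="block">Usage sketch for this overload (illustrative assumptions: <code>K</code> and <code>D</code>
come from <code>fisheye_calibrate</code>, and the image size matches the calibration images):</div>
<pre>
// Estimate a new camera matrix for undistortion of a single fisheye camera.
Size imageSize = new Size(1280, 800);
Mat R = Mat.eye(3, 3, CvType.CV_64F);    // no rectification for the monocular case
Mat P = new Mat();                       // output: new camera intrinsic matrix

Calib3d.fisheye_estimateNewCameraMatrixForUndistortRectify(
        K, D, imageSize, R, P,
        0.0,         // balance: blends between the minimum and maximum focal length, range [0, 1]
        imageSize,   // new_size: keep the original resolution
        1.0);        // fov_scale: divisor for the new focal length (1.0 = unchanged)
</pre>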
  11166. <a name="fisheye_initUndistortRectifyMap-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-">
  11167. <!-- -->
  11168. </a>
  11169. <ul class="blockList">
  11170. <li class="blockList">
  11171. <h4>fisheye_initUndistortRectifyMap</h4>
  11172. <pre>public static&nbsp;void&nbsp;fisheye_initUndistortRectifyMap(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11173. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  11174. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11175. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P,
  11176. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;size,
  11177. int&nbsp;m1type,
  11178. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;map1,
  11179. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;map2)</pre>
  11180. <div class="block">Computes undistortion and rectification maps for image transform by #remap. If D is empty zero
  11181. distortion is used, if R or P is empty identity matrixes are used.</div>
  11182. <dl>
  11183. <dt><span class="paramLabel">Parameters:</span></dt>
11184. <dd><code>K</code> - Camera intrinsic matrix \(\cameramatrix{K}\).</dd>
  11185. <dd><code>D</code> - Input vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
  11186. <dd><code>R</code> - Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
  11187. 1-channel or 1x1 3-channel</dd>
  11188. <dd><code>P</code> - New camera intrinsic matrix (3x3) or new projection matrix (3x4)</dd>
  11189. <dd><code>size</code> - Undistorted image size.</dd>
  11190. <dd><code>m1type</code> - Type of the first output map that can be CV_32FC1 or CV_16SC2 . See #convertMaps
  11191. for details.</dd>
  11192. <dd><code>map1</code> - The first output map.</dd>
  11193. <dd><code>map2</code> - The second output map.</dd>
  11194. </dl>
  11195. </li>
  11196. </ul>
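<div class="block">Usage sketch (illustrative): the maps are computed once and then reused for every frame
with Imgproc#remap. <code>K</code>, <code>D</code>, <code>P</code> and <code>imageSize</code> are assumed to
come from the calls sketched above; <code>distortedImage</code> is an input frame.</div>
<pre>
import org.opencv.imgproc.Imgproc;

Mat map1 = new Mat(), map2 = new Mat();
Calib3d.fisheye_initUndistortRectifyMap(K, D,
        Mat.eye(3, 3, CvType.CV_64F),    // R: identity, no rectification
        P,                               // P: e.g. from fisheye_estimateNewCameraMatrixForUndistortRectify
        imageSize, CvType.CV_16SC2, map1, map2);

Mat undistortedImage = new Mat();
Imgproc.remap(distortedImage, undistortedImage, map1, map2, Imgproc.INTER_LINEAR);
</pre>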
  11197. <a name="fisheye_projectPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  11198. <!-- -->
  11199. </a>
  11200. <ul class="blockList">
  11201. <li class="blockList">
  11202. <h4>fisheye_projectPoints</h4>
  11203. <pre>public static&nbsp;void&nbsp;fisheye_projectPoints(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  11204. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  11205. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  11206. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  11207. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11208. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D)</pre>
  11209. </li>
  11210. </ul>
  11211. <a name="fisheye_projectPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-">
  11212. <!-- -->
  11213. </a>
  11214. <ul class="blockList">
  11215. <li class="blockList">
  11216. <h4>fisheye_projectPoints</h4>
  11217. <pre>public static&nbsp;void&nbsp;fisheye_projectPoints(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  11218. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  11219. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  11220. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  11221. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11222. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  11223. double&nbsp;alpha)</pre>
  11224. </li>
  11225. </ul>
  11226. <a name="fisheye_projectPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Mat-">
  11227. <!-- -->
  11228. </a>
  11229. <ul class="blockList">
  11230. <li class="blockList">
  11231. <h4>fisheye_projectPoints</h4>
  11232. <pre>public static&nbsp;void&nbsp;fisheye_projectPoints(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  11233. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  11234. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  11235. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  11236. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11237. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  11238. double&nbsp;alpha,
  11239. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;jacobian)</pre>
  11240. </li>
  11241. </ul>
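<div class="block">These overloads carry no generated description. By analogy with the pinhole
projectPoints, a usage sketch might look as follows (the point values, pose and skew are illustrative
assumptions; <code>K</code> and <code>D</code> are fisheye intrinsics as above):</div>
<pre>
// Project 3D object points into the image using the fisheye model.
Mat objectPoints3d = new Mat(1, 4, CvType.CV_64FC3);     // 1xN, 3-channel points
objectPoints3d.put(0, 0,
        0.0, 0.0, 1.0,   0.1, 0.0, 1.0,   0.0, 0.1, 1.0,   0.1, 0.1, 1.0);

Mat rvec = Mat.zeros(3, 1, CvType.CV_64F);   // rotation as a Rodrigues vector
Mat tvec = Mat.zeros(3, 1, CvType.CV_64F);   // translation
Mat imagePoints2d = new Mat();
Mat jacobian = new Mat();

Calib3d.fisheye_projectPoints(objectPoints3d, imagePoints2d, rvec, tvec, K, D,
        0.0,        // alpha: skew coefficient
        jacobian);  // Jacobian of the image points w.r.t. the parameters
</pre>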
  11242. <a name="fisheye_stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-">
  11243. <!-- -->
  11244. </a>
  11245. <ul class="blockList">
  11246. <li class="blockList">
  11247. <h4>fisheye_stereoCalibrate</h4>
  11248. <pre>public static&nbsp;double&nbsp;fisheye_stereoCalibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  11249. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  11250. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  11251. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  11252. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  11253. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  11254. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  11255. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  11256. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11257. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T)</pre>
  11258. </li>
  11259. </ul>
  11260. <a name="fisheye_stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-int-">
  11261. <!-- -->
  11262. </a>
  11263. <ul class="blockList">
  11264. <li class="blockList">
  11265. <h4>fisheye_stereoCalibrate</h4>
  11266. <pre>public static&nbsp;double&nbsp;fisheye_stereoCalibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  11267. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  11268. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  11269. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  11270. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  11271. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  11272. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  11273. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  11274. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11275. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  11276. int&nbsp;flags)</pre>
  11277. </li>
  11278. </ul>
  11279. <a name="fisheye_stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.TermCriteria-">
  11280. <!-- -->
  11281. </a>
  11282. <ul class="blockList">
  11283. <li class="blockList">
  11284. <h4>fisheye_stereoCalibrate</h4>
  11285. <pre>public static&nbsp;double&nbsp;fisheye_stereoCalibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  11286. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  11287. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  11288. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  11289. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  11290. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  11291. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  11292. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  11293. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11294. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  11295. int&nbsp;flags,
  11296. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</pre>
  11297. </li>
  11298. </ul>
  11299. <a name="fisheye_stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-">
  11300. <!-- -->
  11301. </a>
  11302. <ul class="blockList">
  11303. <li class="blockList">
  11304. <h4>fisheye_stereoCalibrate</h4>
  11305. <pre>public static&nbsp;double&nbsp;fisheye_stereoCalibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  11306. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  11307. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  11308. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  11309. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  11310. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  11311. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  11312. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  11313. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11314. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  11315. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  11316. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs)</pre>
  11317. <div class="block">Performs stereo calibration</div>
  11318. <dl>
  11319. <dt><span class="paramLabel">Parameters:</span></dt>
  11320. <dd><code>objectPoints</code> - Vector of vectors of the calibration pattern points.</dd>
  11321. <dd><code>imagePoints1</code> - Vector of vectors of the projections of the calibration pattern points,
  11322. observed by the first camera.</dd>
  11323. <dd><code>imagePoints2</code> - Vector of vectors of the projections of the calibration pattern points,
  11324. observed by the second camera.</dd>
  11325. <dd><code>K1</code> - Input/output first camera intrinsic matrix:
11326. \(\vecthreethree{f_x^{(j)}}{0}{c_x^{(j)}}{0}{f_y^{(j)}}{c_y^{(j)}}{0}{0}{1}\), \(j = 0,\, 1\). If
11327. any of REF: fisheye::CALIB_USE_INTRINSIC_GUESS or REF: fisheye::CALIB_FIX_INTRINSIC is specified,
  11328. some or all of the matrix components must be initialized.</dd>
  11329. <dd><code>D1</code> - Input/output vector of distortion coefficients \(\distcoeffsfisheye\) of 4 elements.</dd>
  11330. <dd><code>K2</code> - Input/output second camera intrinsic matrix. The parameter is similar to K1 .</dd>
  11331. <dd><code>D2</code> - Input/output lens distortion coefficients for the second camera. The parameter is
  11332. similar to D1 .</dd>
  11333. <dd><code>imageSize</code> - Size of the image used only to initialize camera intrinsic matrix.</dd>
  11334. <dd><code>R</code> - Output rotation matrix between the 1st and the 2nd camera coordinate systems.</dd>
  11335. <dd><code>T</code> - Output translation vector between the coordinate systems of the cameras.</dd>
  11336. <dd><code>rvecs</code> - Output vector of rotation vectors ( REF: Rodrigues ) estimated for each pattern view in the
11337. coordinate system of the first camera of the stereo pair (e.g. std::vector&lt;cv::Mat&gt;). In more detail, each
  11338. i-th rotation vector together with the corresponding i-th translation vector (see the next output parameter
  11339. description) brings the calibration pattern from the object coordinate space (in which object points are
  11340. specified) to the camera coordinate space of the first camera of the stereo pair. In more technical terms,
  11341. the tuple of the i-th rotation and translation vector performs a change of basis from object coordinate space
  11342. to camera coordinate space of the first camera of the stereo pair.</dd>
  11343. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view, see parameter description
  11344. of previous output parameter ( rvecs ).
  11345. <ul>
  11346. <li>
11347. REF: fisheye::CALIB_FIX_INTRINSIC Fix K1, K2 and D1, D2 so that only the R and T matrices
11348. are estimated.
11349. </li>
11350. <li>
11351. REF: fisheye::CALIB_USE_INTRINSIC_GUESS K1, K2 contain valid initial values of
11352. fx, fy, cx, cy that are optimized further. Otherwise, (cx, cy) is initially set to the image
11353. center (imageSize is used), and focal distances are computed in a least-squares fashion.
11354. </li>
11355. <li>
11356. REF: fisheye::CALIB_RECOMPUTE_EXTRINSIC Extrinsic parameters will be recomputed after each iteration
11357. of intrinsic optimization.
11358. </li>
11359. <li>
11360. REF: fisheye::CALIB_CHECK_COND The functions will check the validity of the condition number.
11361. </li>
11362. <li>
11363. REF: fisheye::CALIB_FIX_SKEW The skew coefficient (alpha) is set to zero and stays zero.
11364. </li>
11365. <li>
11366. REF: fisheye::CALIB_FIX_K1,..., REF: fisheye::CALIB_FIX_K4 Selected distortion coefficients are set to zero and stay
11367. zero.
  11368. </li>
  11369. </ul></dd>
  11370. <dt><span class="returnLabel">Returns:</span></dt>
  11371. <dd>automatically generated</dd>
  11372. </dl>
  11373. </li>
  11374. </ul>
  11375. <a name="fisheye_stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-int-">
  11376. <!-- -->
  11377. </a>
  11378. <ul class="blockList">
  11379. <li class="blockList">
  11380. <h4>fisheye_stereoCalibrate</h4>
  11381. <pre>public static&nbsp;double&nbsp;fisheye_stereoCalibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  11382. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  11383. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  11384. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  11385. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  11386. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  11387. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  11388. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  11389. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11390. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  11391. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  11392. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  11393. int&nbsp;flags)</pre>
  11394. <div class="block">Performs stereo calibration</div>
  11395. <dl>
  11396. <dt><span class="paramLabel">Parameters:</span></dt>
  11397. <dd><code>objectPoints</code> - Vector of vectors of the calibration pattern points.</dd>
  11398. <dd><code>imagePoints1</code> - Vector of vectors of the projections of the calibration pattern points,
  11399. observed by the first camera.</dd>
  11400. <dd><code>imagePoints2</code> - Vector of vectors of the projections of the calibration pattern points,
  11401. observed by the second camera.</dd>
  11402. <dd><code>K1</code> - Input/output first camera intrinsic matrix:
11403. \(\vecthreethree{f_x^{(j)}}{0}{c_x^{(j)}}{0}{f_y^{(j)}}{c_y^{(j)}}{0}{0}{1}\), \(j = 0,\, 1\). If
11404. any of REF: fisheye::CALIB_USE_INTRINSIC_GUESS or REF: fisheye::CALIB_FIX_INTRINSIC is specified,
  11405. some or all of the matrix components must be initialized.</dd>
  11406. <dd><code>D1</code> - Input/output vector of distortion coefficients \(\distcoeffsfisheye\) of 4 elements.</dd>
  11407. <dd><code>K2</code> - Input/output second camera intrinsic matrix. The parameter is similar to K1 .</dd>
  11408. <dd><code>D2</code> - Input/output lens distortion coefficients for the second camera. The parameter is
  11409. similar to D1 .</dd>
  11410. <dd><code>imageSize</code> - Size of the image used only to initialize camera intrinsic matrix.</dd>
  11411. <dd><code>R</code> - Output rotation matrix between the 1st and the 2nd camera coordinate systems.</dd>
  11412. <dd><code>T</code> - Output translation vector between the coordinate systems of the cameras.</dd>
  11413. <dd><code>rvecs</code> - Output vector of rotation vectors ( REF: Rodrigues ) estimated for each pattern view in the
11414. coordinate system of the first camera of the stereo pair (e.g. std::vector&lt;cv::Mat&gt;). In more detail, each
  11415. i-th rotation vector together with the corresponding i-th translation vector (see the next output parameter
  11416. description) brings the calibration pattern from the object coordinate space (in which object points are
  11417. specified) to the camera coordinate space of the first camera of the stereo pair. In more technical terms,
  11418. the tuple of the i-th rotation and translation vector performs a change of basis from object coordinate space
  11419. to camera coordinate space of the first camera of the stereo pair.</dd>
  11420. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view, see parameter description
  11421. of previous output parameter ( rvecs ).</dd>
  11422. <dd><code>flags</code> - Different flags that may be zero or a combination of the following values:
  11423. <ul>
  11424. <li>
11425. REF: fisheye::CALIB_FIX_INTRINSIC Fix K1, K2 and D1, D2 so that only the R and T matrices
11426. are estimated.
11427. </li>
11428. <li>
11429. REF: fisheye::CALIB_USE_INTRINSIC_GUESS K1, K2 contain valid initial values of
11430. fx, fy, cx, cy that are optimized further. Otherwise, (cx, cy) is initially set to the image
11431. center (imageSize is used), and focal distances are computed in a least-squares fashion.
11432. </li>
11433. <li>
11434. REF: fisheye::CALIB_RECOMPUTE_EXTRINSIC Extrinsic parameters will be recomputed after each iteration
11435. of intrinsic optimization.
11436. </li>
11437. <li>
11438. REF: fisheye::CALIB_CHECK_COND The functions will check the validity of the condition number.
11439. </li>
11440. <li>
11441. REF: fisheye::CALIB_FIX_SKEW The skew coefficient (alpha) is set to zero and stays zero.
11442. </li>
11443. <li>
11444. REF: fisheye::CALIB_FIX_K1,..., REF: fisheye::CALIB_FIX_K4 Selected distortion coefficients are set to zero and stay
11445. zero.
  11446. </li>
  11447. </ul></dd>
  11448. <dt><span class="returnLabel">Returns:</span></dt>
  11449. <dd>automatically generated</dd>
  11450. </dl>
  11451. </li>
  11452. </ul>
  11453. <a name="fisheye_stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-int-org.opencv.core.TermCriteria-">
  11454. <!-- -->
  11455. </a>
  11456. <ul class="blockList">
  11457. <li class="blockList">
  11458. <h4>fisheye_stereoCalibrate</h4>
  11459. <pre>public static&nbsp;double&nbsp;fisheye_stereoCalibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  11460. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  11461. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  11462. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  11463. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  11464. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  11465. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  11466. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  11467. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11468. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  11469. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  11470. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  11471. int&nbsp;flags,
  11472. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</pre>
  11473. <div class="block">Performs stereo calibration</div>
  11474. <dl>
  11475. <dt><span class="paramLabel">Parameters:</span></dt>
  11476. <dd><code>objectPoints</code> - Vector of vectors of the calibration pattern points.</dd>
  11477. <dd><code>imagePoints1</code> - Vector of vectors of the projections of the calibration pattern points,
  11478. observed by the first camera.</dd>
  11479. <dd><code>imagePoints2</code> - Vector of vectors of the projections of the calibration pattern points,
  11480. observed by the second camera.</dd>
  11481. <dd><code>K1</code> - Input/output first camera intrinsic matrix:
11482. \(\vecthreethree{f_x^{(j)}}{0}{c_x^{(j)}}{0}{f_y^{(j)}}{c_y^{(j)}}{0}{0}{1}\), \(j = 0,\, 1\). If
11483. any of REF: fisheye::CALIB_USE_INTRINSIC_GUESS or REF: fisheye::CALIB_FIX_INTRINSIC is specified,
  11484. some or all of the matrix components must be initialized.</dd>
  11485. <dd><code>D1</code> - Input/output vector of distortion coefficients \(\distcoeffsfisheye\) of 4 elements.</dd>
  11486. <dd><code>K2</code> - Input/output second camera intrinsic matrix. The parameter is similar to K1 .</dd>
  11487. <dd><code>D2</code> - Input/output lens distortion coefficients for the second camera. The parameter is
  11488. similar to D1 .</dd>
  11489. <dd><code>imageSize</code> - Size of the image used only to initialize camera intrinsic matrix.</dd>
  11490. <dd><code>R</code> - Output rotation matrix between the 1st and the 2nd camera coordinate systems.</dd>
  11491. <dd><code>T</code> - Output translation vector between the coordinate systems of the cameras.</dd>
  11492. <dd><code>rvecs</code> - Output vector of rotation vectors ( REF: Rodrigues ) estimated for each pattern view in the
11493. coordinate system of the first camera of the stereo pair (e.g. std::vector&lt;cv::Mat&gt;). In more detail, each
  11494. i-th rotation vector together with the corresponding i-th translation vector (see the next output parameter
  11495. description) brings the calibration pattern from the object coordinate space (in which object points are
  11496. specified) to the camera coordinate space of the first camera of the stereo pair. In more technical terms,
  11497. the tuple of the i-th rotation and translation vector performs a change of basis from object coordinate space
  11498. to camera coordinate space of the first camera of the stereo pair.</dd>
  11499. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view, see parameter description
  11500. of previous output parameter ( rvecs ).</dd>
  11501. <dd><code>flags</code> - Different flags that may be zero or a combination of the following values:
  11502. <ul>
  11503. <li>
11504. REF: fisheye::CALIB_FIX_INTRINSIC Fix K1, K2 and D1, D2 so that only the R and T matrices
11505. are estimated.
11506. </li>
11507. <li>
11508. REF: fisheye::CALIB_USE_INTRINSIC_GUESS K1, K2 contain valid initial values of
11509. fx, fy, cx, cy that are optimized further. Otherwise, (cx, cy) is initially set to the image
11510. center (imageSize is used), and focal distances are computed in a least-squares fashion.
11511. </li>
11512. <li>
11513. REF: fisheye::CALIB_RECOMPUTE_EXTRINSIC Extrinsic parameters will be recomputed after each iteration
11514. of intrinsic optimization.
11515. </li>
11516. <li>
11517. REF: fisheye::CALIB_CHECK_COND The functions will check the validity of the condition number.
11518. </li>
11519. <li>
11520. REF: fisheye::CALIB_FIX_SKEW The skew coefficient (alpha) is set to zero and stays zero.
11521. </li>
11522. <li>
11523. REF: fisheye::CALIB_FIX_K1,..., REF: fisheye::CALIB_FIX_K4 Selected distortion coefficients are set to zero and stay
11524. zero.
  11525. </li>
  11526. </ul></dd>
  11527. <dd><code>criteria</code> - Termination criteria for the iterative optimization algorithm.</dd>
  11528. <dt><span class="returnLabel">Returns:</span></dt>
  11529. <dd>automatically generated</dd>
  11530. </dl>
  11531. </li>
  11532. </ul>
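<div class="block">Usage sketch for this overload (illustrative): the per-camera intrinsics
<code>K1, D1, K2, D2</code> and the point lists are assumed to come from prior monocular calibration and
corner detection; only the relative pose is refined here.</div>
<pre>
Mat R = new Mat(), T = new Mat();   // outputs: rotation and translation between the cameras
List&lt;Mat&gt; rvecsStereo = new ArrayList&lt;&gt;();
List&lt;Mat&gt; tvecsStereo = new ArrayList&lt;&gt;();

int stereoFlags = Calib3d.fisheye_CALIB_FIX_INTRINSIC;   // keep K1, D1, K2, D2 as provided
TermCriteria stereoCriteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 100, 1e-6);

double rms = Calib3d.fisheye_stereoCalibrate(objectPointsList, imagePoints1List, imagePoints2List,
        K1, D1, K2, D2, imageSize, R, T, rvecsStereo, tvecsStereo, stereoFlags, stereoCriteria);
</pre>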
  11533. <a name="fisheye_stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">
  11534. <!-- -->
  11535. </a>
  11536. <ul class="blockList">
  11537. <li class="blockList">
  11538. <h4>fisheye_stereoRectify</h4>
  11539. <pre>public static&nbsp;void&nbsp;fisheye_stereoRectify(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  11540. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  11541. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  11542. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  11543. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  11544. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11545. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  11546. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  11547. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  11548. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  11549. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  11550. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  11551. int&nbsp;flags)</pre>
  11552. <div class="block">Stereo rectification for fisheye camera model</div>
  11553. <dl>
  11554. <dt><span class="paramLabel">Parameters:</span></dt>
  11555. <dd><code>K1</code> - First camera intrinsic matrix.</dd>
  11556. <dd><code>D1</code> - First camera distortion parameters.</dd>
  11557. <dd><code>K2</code> - Second camera intrinsic matrix.</dd>
  11558. <dd><code>D2</code> - Second camera distortion parameters.</dd>
  11559. <dd><code>imageSize</code> - Size of the image used for stereo calibration.</dd>
  11560. <dd><code>R</code> - Rotation matrix between the coordinate systems of the first and the second
  11561. cameras.</dd>
  11562. <dd><code>tvec</code> - Translation vector between coordinate systems of the cameras.</dd>
  11563. <dd><code>R1</code> - Output 3x3 rectification transform (rotation matrix) for the first camera.</dd>
  11564. <dd><code>R2</code> - Output 3x3 rectification transform (rotation matrix) for the second camera.</dd>
  11565. <dd><code>P1</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the first
  11566. camera.</dd>
  11567. <dd><code>P2</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the second
  11568. camera.</dd>
  11569. <dd><code>Q</code> - Output \(4 \times 4\) disparity-to-depth mapping matrix (see #reprojectImageTo3D ).</dd>
  11570. <dd><code>flags</code> - Operation flags that may be zero or REF: fisheye::CALIB_ZERO_DISPARITY . If the flag is set,
  11571. the function makes the principal points of each camera have the same pixel coordinates in the
  11572. rectified views. And if the flag is not set, the function may still shift the images in the
  11573. horizontal or vertical direction (depending on the orientation of epipolar lines) to maximize the
11574. useful image area.</dd>
  11579. </dl>
  11580. </li>
  11581. </ul>
  11582. <a name="fisheye_stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.Size-">
  11583. <!-- -->
  11584. </a>
  11585. <ul class="blockList">
  11586. <li class="blockList">
  11587. <h4>fisheye_stereoRectify</h4>
  11588. <pre>public static&nbsp;void&nbsp;fisheye_stereoRectify(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  11589. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  11590. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  11591. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  11592. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  11593. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11594. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  11595. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  11596. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  11597. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  11598. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  11599. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  11600. int&nbsp;flags,
  11601. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImageSize)</pre>
  11602. <div class="block">Stereo rectification for fisheye camera model</div>
  11603. <dl>
  11604. <dt><span class="paramLabel">Parameters:</span></dt>
  11605. <dd><code>K1</code> - First camera intrinsic matrix.</dd>
  11606. <dd><code>D1</code> - First camera distortion parameters.</dd>
  11607. <dd><code>K2</code> - Second camera intrinsic matrix.</dd>
  11608. <dd><code>D2</code> - Second camera distortion parameters.</dd>
  11609. <dd><code>imageSize</code> - Size of the image used for stereo calibration.</dd>
  11610. <dd><code>R</code> - Rotation matrix between the coordinate systems of the first and the second
  11611. cameras.</dd>
  11612. <dd><code>tvec</code> - Translation vector between coordinate systems of the cameras.</dd>
  11613. <dd><code>R1</code> - Output 3x3 rectification transform (rotation matrix) for the first camera.</dd>
  11614. <dd><code>R2</code> - Output 3x3 rectification transform (rotation matrix) for the second camera.</dd>
  11615. <dd><code>P1</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the first
  11616. camera.</dd>
  11617. <dd><code>P2</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the second
  11618. camera.</dd>
  11619. <dd><code>Q</code> - Output \(4 \times 4\) disparity-to-depth mapping matrix (see #reprojectImageTo3D ).</dd>
  11620. <dd><code>flags</code> - Operation flags that may be zero or REF: fisheye::CALIB_ZERO_DISPARITY . If the flag is set,
  11621. the function makes the principal points of each camera have the same pixel coordinates in the
  11622. rectified views. And if the flag is not set, the function may still shift the images in the
  11623. horizontal or vertical direction (depending on the orientation of epipolar lines) to maximize the
  11624. useful image area.</dd>
  11625. <dd><code>newImageSize</code> - New image resolution after rectification. The same size should be passed to
  11626. #initUndistortRectifyMap (see the stereo_calib.cpp sample in OpenCV samples directory). When (0,0)
11627. is passed (default), it is set to the original imageSize. Setting it to a larger value can help you
11628. preserve details in the original image, especially when there is a big radial distortion.</dd>
  11630. </dl>
  11631. </li>
  11632. </ul>
  11633. <a name="fisheye_stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.Size-double-">
  11634. <!-- -->
  11635. </a>
  11636. <ul class="blockList">
  11637. <li class="blockList">
  11638. <h4>fisheye_stereoRectify</h4>
  11639. <pre>public static&nbsp;void&nbsp;fisheye_stereoRectify(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  11640. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  11641. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  11642. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  11643. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  11644. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11645. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  11646. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  11647. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  11648. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  11649. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  11650. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  11651. int&nbsp;flags,
  11652. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImageSize,
  11653. double&nbsp;balance)</pre>
  11654. <div class="block">Stereo rectification for fisheye camera model</div>
  11655. <dl>
  11656. <dt><span class="paramLabel">Parameters:</span></dt>
  11657. <dd><code>K1</code> - First camera intrinsic matrix.</dd>
  11658. <dd><code>D1</code> - First camera distortion parameters.</dd>
  11659. <dd><code>K2</code> - Second camera intrinsic matrix.</dd>
  11660. <dd><code>D2</code> - Second camera distortion parameters.</dd>
  11661. <dd><code>imageSize</code> - Size of the image used for stereo calibration.</dd>
  11662. <dd><code>R</code> - Rotation matrix between the coordinate systems of the first and the second
  11663. cameras.</dd>
  11664. <dd><code>tvec</code> - Translation vector between coordinate systems of the cameras.</dd>
  11665. <dd><code>R1</code> - Output 3x3 rectification transform (rotation matrix) for the first camera.</dd>
  11666. <dd><code>R2</code> - Output 3x3 rectification transform (rotation matrix) for the second camera.</dd>
  11667. <dd><code>P1</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the first
  11668. camera.</dd>
  11669. <dd><code>P2</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the second
  11670. camera.</dd>
  11671. <dd><code>Q</code> - Output \(4 \times 4\) disparity-to-depth mapping matrix (see #reprojectImageTo3D ).</dd>
  11672. <dd><code>flags</code> - Operation flags that may be zero or REF: fisheye::CALIB_ZERO_DISPARITY . If the flag is set,
  11673. the function makes the principal points of each camera have the same pixel coordinates in the
  11674. rectified views. And if the flag is not set, the function may still shift the images in the
  11675. horizontal or vertical direction (depending on the orientation of epipolar lines) to maximize the
  11676. useful image area.</dd>
  11677. <dd><code>newImageSize</code> - New image resolution after rectification. The same size should be passed to
  11678. #initUndistortRectifyMap (see the stereo_calib.cpp sample in OpenCV samples directory). When (0,0)
11679. is passed (default), it is set to the original imageSize. Setting it to a larger value can help you
  11680. preserve details in the original image, especially when there is a big radial distortion.</dd>
11681. <dd><code>balance</code> - Sets the new focal length in the range between the minimum and the maximum focal
11682. length. Balance must be in the range [0, 1].</dd>
  11683. </dl>
  11684. </li>
  11685. </ul>
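<div class="block">Usage sketch for this overload (illustrative): <code>K1, D1, K2, D2, R, T</code> and
<code>imageSize</code> are assumed to come from <code>fisheye_stereoCalibrate</code>; the rectification
outputs are then passed to <code>fisheye_initUndistortRectifyMap</code> for each camera.</div>
<pre>
Mat R1 = new Mat(), R2 = new Mat(), P1 = new Mat(), P2 = new Mat(), Q = new Mat();
Calib3d.fisheye_stereoRectify(K1, D1, K2, D2, imageSize, R, T,
        R1, R2, P1, P2, Q,
        Calib3d.fisheye_CALIB_ZERO_DISPARITY,   // align the principal points of the rectified views
        imageSize,                              // newImageSize: keep the calibration resolution
        0.0);                                   // balance in [0, 1]

// Build remap tables for the first camera (same call with K2, D2, R2, P2 for the second one).
Mat map1L = new Mat(), map2L = new Mat();
Calib3d.fisheye_initUndistortRectifyMap(K1, D1, R1, P1, imageSize, CvType.CV_16SC2, map1L, map2L);
</pre>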
  11686. <a name="fisheye_stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.Size-double-double-">
  11687. <!-- -->
  11688. </a>
  11689. <ul class="blockList">
  11690. <li class="blockList">
  11691. <h4>fisheye_stereoRectify</h4>
  11692. <pre>public static&nbsp;void&nbsp;fisheye_stereoRectify(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K1,
  11693. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D1,
  11694. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K2,
  11695. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D2,
  11696. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  11697. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11698. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  11699. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  11700. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  11701. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  11702. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  11703. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  11704. int&nbsp;flags,
  11705. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImageSize,
  11706. double&nbsp;balance,
  11707. double&nbsp;fov_scale)</pre>
  11708. <div class="block">Stereo rectification for fisheye camera model</div>
  11709. <dl>
  11710. <dt><span class="paramLabel">Parameters:</span></dt>
  11711. <dd><code>K1</code> - First camera intrinsic matrix.</dd>
  11712. <dd><code>D1</code> - First camera distortion parameters.</dd>
  11713. <dd><code>K2</code> - Second camera intrinsic matrix.</dd>
  11714. <dd><code>D2</code> - Second camera distortion parameters.</dd>
  11715. <dd><code>imageSize</code> - Size of the image used for stereo calibration.</dd>
  11716. <dd><code>R</code> - Rotation matrix between the coordinate systems of the first and the second
  11717. cameras.</dd>
  11718. <dd><code>tvec</code> - Translation vector between coordinate systems of the cameras.</dd>
  11719. <dd><code>R1</code> - Output 3x3 rectification transform (rotation matrix) for the first camera.</dd>
  11720. <dd><code>R2</code> - Output 3x3 rectification transform (rotation matrix) for the second camera.</dd>
  11721. <dd><code>P1</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the first
  11722. camera.</dd>
  11723. <dd><code>P2</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the second
  11724. camera.</dd>
  11725. <dd><code>Q</code> - Output \(4 \times 4\) disparity-to-depth mapping matrix (see #reprojectImageTo3D ).</dd>
<dd><code>flags</code> - Operation flags that may be zero or REF: fisheye::CALIB_ZERO_DISPARITY . If the flag is set,
the function makes the principal points of each camera have the same pixel coordinates in the
rectified views. If the flag is not set, the function may still shift the images in the
horizontal or vertical direction (depending on the orientation of epipolar lines) to maximize the
useful image area.</dd>
<dd><code>newImageSize</code> - New image resolution after rectification. The same size should be passed to
#initUndistortRectifyMap (see the stereo_calib.cpp sample in OpenCV samples directory). When (0,0)
is passed (default), it is set to the original imageSize. Setting it to a larger value can help you
preserve details in the original image, especially when there is a big radial distortion.</dd>
<dd><code>balance</code> - Sets the new focal length in the range between the min focal length and the max focal
length. Balance is in the range [0, 1].</dd>
<dd><code>fov_scale</code> - Divisor for the new focal length.</dd>
  11738. </dl>
  11739. </li>
  11740. </ul>
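<div class="block">For orientation only: a minimal sketch of calling the full overload above. All matrix values are placeholders (in real use K1, D1, K2, D2, R and tvec come from a prior fisheye stereo calibration), and the snippet assumes the OpenCV native library is already loaded and org.opencv.core / org.opencv.calib3d are imported.</div>
<pre>
// import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
Mat K1 = Mat.eye(3, 3, CvType.CV_64F), K2 = Mat.eye(3, 3, CvType.CV_64F); // placeholder intrinsics
K1.put(0, 0, 420.0); K1.put(1, 1, 420.0); K1.put(0, 2, 639.5); K1.put(1, 2, 359.5);
K2.put(0, 0, 420.0); K2.put(1, 1, 420.0); K2.put(0, 2, 639.5); K2.put(1, 2, 359.5);
Mat D1 = Mat.zeros(4, 1, CvType.CV_64F), D2 = Mat.zeros(4, 1, CvType.CV_64F); // k1..k4 placeholders
Mat R = Mat.eye(3, 3, CvType.CV_64F);       // rotation between the two cameras (placeholder)
Mat tvec = Mat.zeros(3, 1, CvType.CV_64F);
tvec.put(0, 0, -0.06);                      // e.g. a 6 cm baseline along x (placeholder)
Size imageSize = new Size(1280, 720);
Mat R1 = new Mat(), R2 = new Mat(), P1 = new Mat(), P2 = new Mat(), Q = new Mat();
Calib3d.fisheye_stereoRectify(K1, D1, K2, D2, imageSize, R, tvec,
        R1, R2, P1, P2, Q,
        0,               // flags: fisheye CALIB_ZERO_DISPARITY may be OR-ed in here
        new Size(0, 0),  // newImageSize: (0,0) keeps the original imageSize
        0.0,             // balance in [0, 1]
        1.0);            // fov_scale: divisor for the new focal length
// R1/P1 and R2/P2 can then be fed to the fisheye variant of initUndistortRectifyMap per camera.
</pre>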
  11741. <a name="fisheye_undistortImage-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  11742. <!-- -->
  11743. </a>
  11744. <ul class="blockList">
  11745. <li class="blockList">
  11746. <h4>fisheye_undistortImage</h4>
  11747. <pre>public static&nbsp;void&nbsp;fisheye_undistortImage(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  11748. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  11749. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11750. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D)</pre>
  11751. <div class="block">Transforms an image to compensate for fisheye lens distortion.</div>
  11752. <dl>
  11753. <dt><span class="paramLabel">Parameters:</span></dt>
  11754. <dd><code>distorted</code> - image with fisheye lens distortion.</dd>
  11755. <dd><code>undistorted</code> - Output image with compensated fisheye lens distortion.</dd>
  11756. <dd><code>K</code> - Camera intrinsic matrix \(cameramatrix{K}\).</dd>
<dd><code>D</code> - Input vector of distortion coefficients \(\distcoeffsfisheye\).
The function transforms an image to compensate for radial and tangential lens distortion.
  11760. The function is simply a combination of #fisheye::initUndistortRectifyMap (with unity R ) and #remap
  11761. (with bilinear interpolation). See the former function for details of the transformation being
  11762. performed.
The results of undistortImage are shown below:
<ul>
<li>
a) result of #undistort for the perspective camera model (all possible coefficients (k_1, k_2, k_3,
k_4, k_5, k_6) of distortion were optimized under calibration)
</li>
<li>
b) result of #fisheye::undistortImage for the fisheye camera model (all possible coefficients (k_1, k_2,
k_3, k_4) of fisheye distortion were optimized under calibration)
</li>
<li>
c) original image that was captured with a fisheye lens
</li>
</ul>
Pictures a) and b) are almost the same. But if we consider points located far from the center of
the image, we can see that in image a) these points are distorted.
(image: pics/fisheye_undistorted.jpg)</dd>
  11782. </dl>
  11783. </li>
  11784. </ul>
  11785. <a name="fisheye_undistortImage-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  11786. <!-- -->
  11787. </a>
  11788. <ul class="blockList">
  11789. <li class="blockList">
  11790. <h4>fisheye_undistortImage</h4>
  11791. <pre>public static&nbsp;void&nbsp;fisheye_undistortImage(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  11792. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  11793. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11794. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  11795. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Knew)</pre>
  11796. <div class="block">Transforms an image to compensate for fisheye lens distortion.</div>
  11797. <dl>
  11798. <dt><span class="paramLabel">Parameters:</span></dt>
  11799. <dd><code>distorted</code> - image with fisheye lens distortion.</dd>
  11800. <dd><code>undistorted</code> - Output image with compensated fisheye lens distortion.</dd>
  11801. <dd><code>K</code> - Camera intrinsic matrix \(cameramatrix{K}\).</dd>
  11802. <dd><code>D</code> - Input vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
  11803. <dd><code>Knew</code> - Camera intrinsic matrix of the distorted image. By default, it is the identity matrix but you
  11804. may additionally scale and shift the result by using a different matrix.
The function transforms an image to compensate for radial and tangential lens distortion.
  11806. The function is simply a combination of #fisheye::initUndistortRectifyMap (with unity R ) and #remap
  11807. (with bilinear interpolation). See the former function for details of the transformation being
  11808. performed.
The results of undistortImage are shown below:
<ul>
<li>
a) result of #undistort for the perspective camera model (all possible coefficients (k_1, k_2, k_3,
k_4, k_5, k_6) of distortion were optimized under calibration)
</li>
<li>
b) result of #fisheye::undistortImage for the fisheye camera model (all possible coefficients (k_1, k_2,
k_3, k_4) of fisheye distortion were optimized under calibration)
</li>
<li>
c) original image that was captured with a fisheye lens
</li>
</ul>
Pictures a) and b) are almost the same. But if we consider points located far from the center of
the image, we can see that in image a) these points are distorted.
(image: pics/fisheye_undistorted.jpg)</dd>
  11828. </dl>
  11829. </li>
  11830. </ul>
  11831. <a name="fisheye_undistortImage-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-">
  11832. <!-- -->
  11833. </a>
  11834. <ul class="blockList">
  11835. <li class="blockList">
  11836. <h4>fisheye_undistortImage</h4>
  11837. <pre>public static&nbsp;void&nbsp;fisheye_undistortImage(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  11838. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  11839. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11840. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  11841. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Knew,
  11842. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;new_size)</pre>
  11843. <div class="block">Transforms an image to compensate for fisheye lens distortion.</div>
  11844. <dl>
  11845. <dt><span class="paramLabel">Parameters:</span></dt>
  11846. <dd><code>distorted</code> - image with fisheye lens distortion.</dd>
  11847. <dd><code>undistorted</code> - Output image with compensated fisheye lens distortion.</dd>
  11848. <dd><code>K</code> - Camera intrinsic matrix \(cameramatrix{K}\).</dd>
  11849. <dd><code>D</code> - Input vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
  11850. <dd><code>Knew</code> - Camera intrinsic matrix of the distorted image. By default, it is the identity matrix but you
  11851. may additionally scale and shift the result by using a different matrix.</dd>
  11852. <dd><code>new_size</code> - the new size
The function transforms an image to compensate for radial and tangential lens distortion.
  11854. The function is simply a combination of #fisheye::initUndistortRectifyMap (with unity R ) and #remap
  11855. (with bilinear interpolation). See the former function for details of the transformation being
  11856. performed.
The results of undistortImage are shown below:
<ul>
<li>
a) result of #undistort for the perspective camera model (all possible coefficients (k_1, k_2, k_3,
k_4, k_5, k_6) of distortion were optimized under calibration)
</li>
<li>
b) result of #fisheye::undistortImage for the fisheye camera model (all possible coefficients (k_1, k_2,
k_3, k_4) of fisheye distortion were optimized under calibration)
</li>
<li>
c) original image that was captured with a fisheye lens
</li>
</ul>
Pictures a) and b) are almost the same. But if we consider points located far from the center of
the image, we can see that in image a) these points are distorted.
(image: pics/fisheye_undistorted.jpg)</dd>
  11876. </dl>
  11877. </li>
  11878. </ul>
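<div class="block">A minimal sketch of the overload that takes Knew and new_size. The frame, intrinsics and distortion values below are placeholders; in practice K and D come from fisheye calibration, and org.opencv.core / org.opencv.calib3d are assumed to be imported with the native library loaded.</div>
<pre>
// import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
Mat distorted = Mat.zeros(480, 640, CvType.CV_8UC3);   // placeholder frame; use a real capture
Mat K = Mat.eye(3, 3, CvType.CV_64F);                  // placeholder fisheye intrinsics
K.put(0, 0, 420.0); K.put(1, 1, 420.0);
K.put(0, 2, 319.5); K.put(1, 2, 239.5);
Mat D = Mat.zeros(4, 1, CvType.CV_64F);                // k1..k4 placeholders
Mat undistorted = new Mat();
Calib3d.fisheye_undistortImage(distorted, undistorted, K, D,
        K.clone(),            // Knew: keep the same scale as K
        distorted.size());    // new_size: same resolution as the input
</pre>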
  11879. <a name="fisheye_undistortPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  11880. <!-- -->
  11881. </a>
  11882. <ul class="blockList">
  11883. <li class="blockList">
  11884. <h4>fisheye_undistortPoints</h4>
  11885. <pre>public static&nbsp;void&nbsp;fisheye_undistortPoints(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  11886. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  11887. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11888. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D)</pre>
  11889. <div class="block">Undistorts 2D points using fisheye model</div>
  11890. <dl>
  11891. <dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>distorted</code> - Array of distorted image points, 1xN/Nx1 2-channel (or vector&lt;Point2f&gt; ), where N is the
number of points in the view.</dd>
<dd><code>K</code> - Camera intrinsic matrix \(\cameramatrix{K}\).</dd>
<dd><code>D</code> - Input vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
  11897. <dd><code>undistorted</code> - Output array of image points, 1xN/Nx1 2-channel, or vector&lt;Point2f&gt; .</dd>
  11898. </dl>
  11899. </li>
  11900. </ul>
  11901. <a name="fisheye_undistortPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  11902. <!-- -->
  11903. </a>
  11904. <ul class="blockList">
  11905. <li class="blockList">
  11906. <h4>fisheye_undistortPoints</h4>
  11907. <pre>public static&nbsp;void&nbsp;fisheye_undistortPoints(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  11908. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  11909. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11910. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  11911. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R)</pre>
  11912. <div class="block">Undistorts 2D points using fisheye model</div>
  11913. <dl>
  11914. <dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>distorted</code> - Array of distorted image points, 1xN/Nx1 2-channel (or vector&lt;Point2f&gt; ), where N is the
number of points in the view.</dd>
  11917. <dd><code>K</code> - Camera intrinsic matrix \(cameramatrix{K}\).</dd>
  11918. <dd><code>D</code> - Input vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
  11919. <dd><code>R</code> - Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
  11920. 1-channel or 1x1 3-channel</dd>
  11921. <dd><code>undistorted</code> - Output array of image points, 1xN/Nx1 2-channel, or vector&lt;Point2f&gt; .</dd>
  11922. </dl>
  11923. </li>
  11924. </ul>
  11925. <a name="fisheye_undistortPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  11926. <!-- -->
  11927. </a>
  11928. <ul class="blockList">
  11929. <li class="blockList">
  11930. <h4>fisheye_undistortPoints</h4>
  11931. <pre>public static&nbsp;void&nbsp;fisheye_undistortPoints(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  11932. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  11933. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11934. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  11935. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11936. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P)</pre>
  11937. <div class="block">Undistorts 2D points using fisheye model</div>
  11938. <dl>
  11939. <dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>distorted</code> - Array of distorted image points, 1xN/Nx1 2-channel (or vector&lt;Point2f&gt; ), where N is the
number of points in the view.</dd>
  11942. <dd><code>K</code> - Camera intrinsic matrix \(cameramatrix{K}\).</dd>
  11943. <dd><code>D</code> - Input vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
  11944. <dd><code>R</code> - Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
  11945. 1-channel or 1x1 3-channel</dd>
  11946. <dd><code>P</code> - New camera intrinsic matrix (3x3) or new projection matrix (3x4)</dd>
  11947. <dd><code>undistorted</code> - Output array of image points, 1xN/Nx1 2-channel, or vector&lt;Point2f&gt; .</dd>
  11948. </dl>
  11949. </li>
  11950. </ul>
  11951. <a name="fisheye_undistortPoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.TermCriteria-">
  11952. <!-- -->
  11953. </a>
  11954. <ul class="blockList">
  11955. <li class="blockList">
  11956. <h4>fisheye_undistortPoints</h4>
  11957. <pre>public static&nbsp;void&nbsp;fisheye_undistortPoints(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distorted,
  11958. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;undistorted,
  11959. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;K,
  11960. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;D,
  11961. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  11962. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P,
  11963. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</pre>
  11964. <div class="block">Undistorts 2D points using fisheye model</div>
  11965. <dl>
  11966. <dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>distorted</code> - Array of distorted image points, 1xN/Nx1 2-channel (or vector&lt;Point2f&gt; ), where N is the
number of points in the view.</dd>
  11969. <dd><code>K</code> - Camera intrinsic matrix \(cameramatrix{K}\).</dd>
  11970. <dd><code>D</code> - Input vector of distortion coefficients \(\distcoeffsfisheye\).</dd>
  11971. <dd><code>R</code> - Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
  11972. 1-channel or 1x1 3-channel</dd>
  11973. <dd><code>P</code> - New camera intrinsic matrix (3x3) or new projection matrix (3x4)</dd>
  11974. <dd><code>criteria</code> - Termination criteria</dd>
  11975. <dd><code>undistorted</code> - Output array of image points, 1xN/Nx1 2-channel, or vector&lt;Point2f&gt; .</dd>
  11976. </dl>
  11977. </li>
  11978. </ul>
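<div class="block">A short sketch of the overload with R and P. The point coordinates and intrinsics are placeholders; K and D would normally come from fisheye calibration. With R set to the identity and P set to K, the undistorted points come back in pixel coordinates; omitting P would leave them in normalized coordinates.</div>
<pre>
// import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
MatOfPoint2f distorted = new MatOfPoint2f(
        new Point(310.5, 241.0), new Point(12.0, 7.5));   // placeholder distorted pixels
Mat K = Mat.eye(3, 3, CvType.CV_64F);                     // placeholder intrinsics
K.put(0, 0, 420.0); K.put(1, 1, 420.0);
K.put(0, 2, 319.5); K.put(1, 2, 239.5);
Mat D = Mat.zeros(4, 1, CvType.CV_64F);                   // k1..k4 placeholders
Mat undistorted = new Mat();
Calib3d.fisheye_undistortPoints(distorted, undistorted, K, D,
        Mat.eye(3, 3, CvType.CV_64F),   // R: no rectification
        K);                             // P = K: reproject back to pixel coordinates
</pre>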
  11979. <a name="getDefaultNewCameraMatrix-org.opencv.core.Mat-">
  11980. <!-- -->
  11981. </a>
  11982. <ul class="blockList">
  11983. <li class="blockList">
  11984. <h4>getDefaultNewCameraMatrix</h4>
  11985. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;getDefaultNewCameraMatrix(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix)</pre>
  11986. <div class="block">Returns the default new camera matrix.
  11987. The function returns the camera matrix that is either an exact copy of the input cameraMatrix (when
centerPrincipalPoint=false ), or the modified one (when centerPrincipalPoint=true).
  11989. In the latter case, the new camera matrix will be:
  11990. \(\begin{bmatrix} f_x &amp;&amp; 0 &amp;&amp; ( \texttt{imgSize.width} -1)*0.5 \\ 0 &amp;&amp; f_y &amp;&amp; ( \texttt{imgSize.height} -1)*0.5 \\ 0 &amp;&amp; 0 &amp;&amp; 1 \end{bmatrix} ,\)
  11991. where \(f_x\) and \(f_y\) are \((0,0)\) and \((1,1)\) elements of cameraMatrix, respectively.
  11992. By default, the undistortion functions in OpenCV (see #initUndistortRectifyMap, #undistort) do not
  11993. move the principal point. However, when you work with stereo, it is important to move the principal
  11994. points in both views to the same y-coordinate (which is required by most of stereo correspondence
  11995. algorithms), and may be to the same x-coordinate too. So, you can form the new camera matrix for
  11996. each view where the principal points are located at the center.</div>
  11997. <dl>
  11998. <dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>cameraMatrix</code> - Input camera matrix.</dd>
  12001. <dt><span class="returnLabel">Returns:</span></dt>
  12002. <dd>automatically generated</dd>
  12003. </dl>
  12004. </li>
  12005. </ul>
  12006. <a name="getDefaultNewCameraMatrix-org.opencv.core.Mat-org.opencv.core.Size-">
  12007. <!-- -->
  12008. </a>
  12009. <ul class="blockList">
  12010. <li class="blockList">
  12011. <h4>getDefaultNewCameraMatrix</h4>
  12012. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;getDefaultNewCameraMatrix(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12013. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imgsize)</pre>
  12014. <div class="block">Returns the default new camera matrix.
  12015. The function returns the camera matrix that is either an exact copy of the input cameraMatrix (when
centerPrincipalPoint=false ), or the modified one (when centerPrincipalPoint=true).
  12017. In the latter case, the new camera matrix will be:
  12018. \(\begin{bmatrix} f_x &amp;&amp; 0 &amp;&amp; ( \texttt{imgSize.width} -1)*0.5 \\ 0 &amp;&amp; f_y &amp;&amp; ( \texttt{imgSize.height} -1)*0.5 \\ 0 &amp;&amp; 0 &amp;&amp; 1 \end{bmatrix} ,\)
  12019. where \(f_x\) and \(f_y\) are \((0,0)\) and \((1,1)\) elements of cameraMatrix, respectively.
  12020. By default, the undistortion functions in OpenCV (see #initUndistortRectifyMap, #undistort) do not
  12021. move the principal point. However, when you work with stereo, it is important to move the principal
  12022. points in both views to the same y-coordinate (which is required by most of stereo correspondence
  12023. algorithms), and may be to the same x-coordinate too. So, you can form the new camera matrix for
  12024. each view where the principal points are located at the center.</div>
  12025. <dl>
  12026. <dt><span class="paramLabel">Parameters:</span></dt>
  12027. <dd><code>cameraMatrix</code> - Input camera matrix.</dd>
<dd><code>imgsize</code> - Camera view image size in pixels.</dd>
  12030. <dt><span class="returnLabel">Returns:</span></dt>
  12031. <dd>automatically generated</dd>
  12032. </dl>
  12033. </li>
  12034. </ul>
  12035. <a name="getDefaultNewCameraMatrix-org.opencv.core.Mat-org.opencv.core.Size-boolean-">
  12036. <!-- -->
  12037. </a>
  12038. <ul class="blockList">
  12039. <li class="blockList">
  12040. <h4>getDefaultNewCameraMatrix</h4>
  12041. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;getDefaultNewCameraMatrix(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12042. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imgsize,
  12043. boolean&nbsp;centerPrincipalPoint)</pre>
  12044. <div class="block">Returns the default new camera matrix.
  12045. The function returns the camera matrix that is either an exact copy of the input cameraMatrix (when
centerPrincipalPoint=false ), or the modified one (when centerPrincipalPoint=true).
  12047. In the latter case, the new camera matrix will be:
  12048. \(\begin{bmatrix} f_x &amp;&amp; 0 &amp;&amp; ( \texttt{imgSize.width} -1)*0.5 \\ 0 &amp;&amp; f_y &amp;&amp; ( \texttt{imgSize.height} -1)*0.5 \\ 0 &amp;&amp; 0 &amp;&amp; 1 \end{bmatrix} ,\)
  12049. where \(f_x\) and \(f_y\) are \((0,0)\) and \((1,1)\) elements of cameraMatrix, respectively.
  12050. By default, the undistortion functions in OpenCV (see #initUndistortRectifyMap, #undistort) do not
  12051. move the principal point. However, when you work with stereo, it is important to move the principal
  12052. points in both views to the same y-coordinate (which is required by most of stereo correspondence
  12053. algorithms), and may be to the same x-coordinate too. So, you can form the new camera matrix for
  12054. each view where the principal points are located at the center.</div>
  12055. <dl>
  12056. <dt><span class="paramLabel">Parameters:</span></dt>
  12057. <dd><code>cameraMatrix</code> - Input camera matrix.</dd>
  12058. <dd><code>imgsize</code> - Camera view image size in pixels.</dd>
  12059. <dd><code>centerPrincipalPoint</code> - Location of the principal point in the new camera matrix. The
  12060. parameter indicates whether this location should be at the image center or not.</dd>
  12061. <dt><span class="returnLabel">Returns:</span></dt>
  12062. <dd>automatically generated</dd>
  12063. </dl>
  12064. </li>
  12065. </ul>
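<div class="block">A small sketch showing the effect of centerPrincipalPoint; the focal lengths and principal point below are placeholders, and org.opencv.core / org.opencv.calib3d are assumed to be imported with the native library loaded.</div>
<pre>
// import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64F);
cameraMatrix.put(0, 0, 800.0); cameraMatrix.put(1, 1, 800.0);
cameraMatrix.put(0, 2, 315.2); cameraMatrix.put(1, 2, 245.7);   // off-center principal point
Size imgsize = new Size(640, 480);
// Exact copy of cameraMatrix:
Mat same = Calib3d.getDefaultNewCameraMatrix(cameraMatrix, imgsize, false);
// Principal point moved to ((640-1)*0.5, (480-1)*0.5) = (319.5, 239.5):
Mat centered = Calib3d.getDefaultNewCameraMatrix(cameraMatrix, imgsize, true);
</pre>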
  12066. <a name="getOptimalNewCameraMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-double-">
  12067. <!-- -->
  12068. </a>
  12069. <ul class="blockList">
  12070. <li class="blockList">
  12071. <h4>getOptimalNewCameraMatrix</h4>
  12072. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;getOptimalNewCameraMatrix(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12073. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  12074. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  12075. double&nbsp;alpha)</pre>
  12076. <div class="block">Returns the new camera intrinsic matrix based on the free scaling parameter.</div>
  12077. <dl>
  12078. <dt><span class="paramLabel">Parameters:</span></dt>
  12079. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix.</dd>
  12080. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  12081. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  12082. assumed.</dd>
  12083. <dd><code>imageSize</code> - Original image size.</dd>
  12084. <dd><code>alpha</code> - Free scaling parameter between 0 (when all the pixels in the undistorted image are
  12085. valid) and 1 (when all the source image pixels are retained in the undistorted image). See
  12086. #stereoRectify for details.
By default, the principal point is chosen to best fit a subset of the source image (determined by
alpha) to the corrected image.</dd>
  12090. <dt><span class="returnLabel">Returns:</span></dt>
  12091. <dd>new_camera_matrix Output new camera intrinsic matrix.
  12092. The function computes and returns the optimal new camera intrinsic matrix based on the free scaling parameter.
By varying this parameter, you may retrieve only sensible pixels (alpha=0), keep all the original
image pixels if there is valuable information in the corners (alpha=1), or get something in between.
  12095. When alpha&gt;0 , the undistorted result is likely to have some black pixels corresponding to
  12096. "virtual" pixels outside of the captured distorted image. The original camera intrinsic matrix, distortion
  12097. coefficients, the computed new camera intrinsic matrix, and newImageSize should be passed to
  12098. #initUndistortRectifyMap to produce the maps for #remap .</dd>
  12099. </dl>
  12100. </li>
  12101. </ul>
  12102. <a name="getOptimalNewCameraMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-double-org.opencv.core.Size-">
  12103. <!-- -->
  12104. </a>
  12105. <ul class="blockList">
  12106. <li class="blockList">
  12107. <h4>getOptimalNewCameraMatrix</h4>
  12108. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;getOptimalNewCameraMatrix(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12109. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  12110. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  12111. double&nbsp;alpha,
  12112. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImgSize)</pre>
  12113. <div class="block">Returns the new camera intrinsic matrix based on the free scaling parameter.</div>
  12114. <dl>
  12115. <dt><span class="paramLabel">Parameters:</span></dt>
  12116. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix.</dd>
  12117. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  12118. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  12119. assumed.</dd>
  12120. <dd><code>imageSize</code> - Original image size.</dd>
  12121. <dd><code>alpha</code> - Free scaling parameter between 0 (when all the pixels in the undistorted image are
  12122. valid) and 1 (when all the source image pixels are retained in the undistorted image). See
  12123. #stereoRectify for details.</dd>
  12124. <dd><code>newImgSize</code> - Image size after rectification. By default, it is set to imageSize .
By default, the principal point is chosen to best fit a subset of the source image (determined by
alpha) to the corrected image.</dd>
  12128. <dt><span class="returnLabel">Returns:</span></dt>
  12129. <dd>new_camera_matrix Output new camera intrinsic matrix.
  12130. The function computes and returns the optimal new camera intrinsic matrix based on the free scaling parameter.
By varying this parameter, you may retrieve only sensible pixels (alpha=0), keep all the original
image pixels if there is valuable information in the corners (alpha=1), or get something in between.
  12133. When alpha&gt;0 , the undistorted result is likely to have some black pixels corresponding to
  12134. "virtual" pixels outside of the captured distorted image. The original camera intrinsic matrix, distortion
  12135. coefficients, the computed new camera intrinsic matrix, and newImageSize should be passed to
  12136. #initUndistortRectifyMap to produce the maps for #remap .</dd>
  12137. </dl>
  12138. </li>
  12139. </ul>
  12140. <a name="getOptimalNewCameraMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-double-org.opencv.core.Size-org.opencv.core.Rect-">
  12141. <!-- -->
  12142. </a>
  12143. <ul class="blockList">
  12144. <li class="blockList">
  12145. <h4>getOptimalNewCameraMatrix</h4>
  12146. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;getOptimalNewCameraMatrix(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12147. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  12148. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  12149. double&nbsp;alpha,
  12150. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImgSize,
  12151. <a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;validPixROI)</pre>
  12152. <div class="block">Returns the new camera intrinsic matrix based on the free scaling parameter.</div>
  12153. <dl>
  12154. <dt><span class="paramLabel">Parameters:</span></dt>
  12155. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix.</dd>
  12156. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  12157. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  12158. assumed.</dd>
  12159. <dd><code>imageSize</code> - Original image size.</dd>
  12160. <dd><code>alpha</code> - Free scaling parameter between 0 (when all the pixels in the undistorted image are
  12161. valid) and 1 (when all the source image pixels are retained in the undistorted image). See
  12162. #stereoRectify for details.</dd>
  12163. <dd><code>newImgSize</code> - Image size after rectification. By default, it is set to imageSize .</dd>
  12164. <dd><code>validPixROI</code> - Optional output rectangle that outlines all-good-pixels region in the
  12165. undistorted image. See roi1, roi2 description in #stereoRectify .
By default, the principal point is chosen to best fit a subset of the source image (determined by
alpha) to the corrected image.</dd>
  12168. <dt><span class="returnLabel">Returns:</span></dt>
  12169. <dd>new_camera_matrix Output new camera intrinsic matrix.
  12170. The function computes and returns the optimal new camera intrinsic matrix based on the free scaling parameter.
By varying this parameter, you may retrieve only sensible pixels (alpha=0), keep all the original
image pixels if there is valuable information in the corners (alpha=1), or get something in between.
  12173. When alpha&gt;0 , the undistorted result is likely to have some black pixels corresponding to
  12174. "virtual" pixels outside of the captured distorted image. The original camera intrinsic matrix, distortion
  12175. coefficients, the computed new camera intrinsic matrix, and newImageSize should be passed to
  12176. #initUndistortRectifyMap to produce the maps for #remap .</dd>
  12177. </dl>
  12178. </li>
  12179. </ul>
  12180. <a name="getOptimalNewCameraMatrix-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-double-org.opencv.core.Size-org.opencv.core.Rect-boolean-">
  12181. <!-- -->
  12182. </a>
  12183. <ul class="blockList">
  12184. <li class="blockList">
  12185. <h4>getOptimalNewCameraMatrix</h4>
  12186. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;getOptimalNewCameraMatrix(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12187. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  12188. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  12189. double&nbsp;alpha,
  12190. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImgSize,
  12191. <a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;validPixROI,
  12192. boolean&nbsp;centerPrincipalPoint)</pre>
  12193. <div class="block">Returns the new camera intrinsic matrix based on the free scaling parameter.</div>
  12194. <dl>
  12195. <dt><span class="paramLabel">Parameters:</span></dt>
  12196. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix.</dd>
  12197. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  12198. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  12199. assumed.</dd>
  12200. <dd><code>imageSize</code> - Original image size.</dd>
  12201. <dd><code>alpha</code> - Free scaling parameter between 0 (when all the pixels in the undistorted image are
  12202. valid) and 1 (when all the source image pixels are retained in the undistorted image). See
  12203. #stereoRectify for details.</dd>
  12204. <dd><code>newImgSize</code> - Image size after rectification. By default, it is set to imageSize .</dd>
  12205. <dd><code>validPixROI</code> - Optional output rectangle that outlines all-good-pixels region in the
  12206. undistorted image. See roi1, roi2 description in #stereoRectify .</dd>
  12207. <dd><code>centerPrincipalPoint</code> - Optional flag that indicates whether in the new camera intrinsic matrix the
  12208. principal point should be at the image center or not. By default, the principal point is chosen to
  12209. best fit a subset of the source image (determined by alpha) to the corrected image.</dd>
  12210. <dt><span class="returnLabel">Returns:</span></dt>
  12211. <dd>new_camera_matrix Output new camera intrinsic matrix.
  12212. The function computes and returns the optimal new camera intrinsic matrix based on the free scaling parameter.
By varying this parameter, you may retrieve only sensible pixels (alpha=0), keep all the original
image pixels if there is valuable information in the corners (alpha=1), or get something in between.
  12215. When alpha&gt;0 , the undistorted result is likely to have some black pixels corresponding to
  12216. "virtual" pixels outside of the captured distorted image. The original camera intrinsic matrix, distortion
  12217. coefficients, the computed new camera intrinsic matrix, and newImageSize should be passed to
  12218. #initUndistortRectifyMap to produce the maps for #remap .</dd>
  12219. </dl>
  12220. </li>
  12221. </ul>
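<div class="block">A sketch contrasting alpha=0 and alpha=1, with placeholder intrinsics and distortion (in real use these come from #calibrateCamera); org.opencv.core / org.opencv.calib3d imports and a loaded native library are assumed.</div>
<pre>
// import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64F);   // placeholder calibrated intrinsics
cameraMatrix.put(0, 0, 700.0); cameraMatrix.put(1, 1, 700.0);
cameraMatrix.put(0, 2, 319.5); cameraMatrix.put(1, 2, 239.5);
Mat distCoeffs = Mat.zeros(5, 1, CvType.CV_64F);   // k1 k2 p1 p2 k3, placeholders
Size imageSize = new Size(640, 480);
Rect roi = new Rect();                             // filled with the all-good-pixels region
// alpha = 0: only valid pixels survive; alpha = 1: every source pixel is retained.
Mat tight = Calib3d.getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, imageSize, 0.0);
Mat full  = Calib3d.getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, imageSize, 1.0,
        imageSize, roi, false);
// 'full' (plus roi) is what would then be passed on to initUndistortRectifyMap / remap.
</pre>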
  12222. <a name="getValidDisparityROI-org.opencv.core.Rect-org.opencv.core.Rect-int-int-int-">
  12223. <!-- -->
  12224. </a>
  12225. <ul class="blockList">
  12226. <li class="blockList">
  12227. <h4>getValidDisparityROI</h4>
  12228. <pre>public static&nbsp;<a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;getValidDisparityROI(<a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;roi1,
  12229. <a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;roi2,
  12230. int&nbsp;minDisparity,
  12231. int&nbsp;numberOfDisparities,
  12232. int&nbsp;blockSize)</pre>
  12233. </li>
  12234. </ul>
  12235. <a name="initCameraMatrix2D-java.util.List-java.util.List-org.opencv.core.Size-">
  12236. <!-- -->
  12237. </a>
  12238. <ul class="blockList">
  12239. <li class="blockList">
  12240. <h4>initCameraMatrix2D</h4>
  12241. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;initCameraMatrix2D(java.util.List&lt;<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&gt;&nbsp;objectPoints,
  12242. java.util.List&lt;<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&gt;&nbsp;imagePoints,
  12243. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize)</pre>
  12244. <div class="block">Finds an initial camera intrinsic matrix from 3D-2D point correspondences.</div>
  12245. <dl>
  12246. <dt><span class="paramLabel">Parameters:</span></dt>
  12247. <dd><code>objectPoints</code> - Vector of vectors of the calibration pattern points in the calibration pattern
  12248. coordinate space. In the old interface all the per-view vectors are concatenated. See
  12249. #calibrateCamera for details.</dd>
  12250. <dd><code>imagePoints</code> - Vector of vectors of the projections of the calibration pattern points. In the
  12251. old interface all the per-view vectors are concatenated.</dd>
<dd><code>imageSize</code> - Image size in pixels used to initialize the principal point.
  12254. The function estimates and returns an initial camera intrinsic matrix for the camera calibration process.
  12255. Currently, the function only supports planar calibration patterns, which are patterns where each
  12256. object point has z-coordinate =0.</dd>
  12257. <dt><span class="returnLabel">Returns:</span></dt>
  12258. <dd>automatically generated</dd>
  12259. </dl>
  12260. </li>
  12261. </ul>
  12262. <a name="initCameraMatrix2D-java.util.List-java.util.List-org.opencv.core.Size-double-">
  12263. <!-- -->
  12264. </a>
  12265. <ul class="blockList">
  12266. <li class="blockList">
  12267. <h4>initCameraMatrix2D</h4>
  12268. <pre>public static&nbsp;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;initCameraMatrix2D(java.util.List&lt;<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&gt;&nbsp;objectPoints,
  12269. java.util.List&lt;<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&gt;&nbsp;imagePoints,
  12270. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  12271. double&nbsp;aspectRatio)</pre>
  12272. <div class="block">Finds an initial camera intrinsic matrix from 3D-2D point correspondences.</div>
  12273. <dl>
  12274. <dt><span class="paramLabel">Parameters:</span></dt>
  12275. <dd><code>objectPoints</code> - Vector of vectors of the calibration pattern points in the calibration pattern
  12276. coordinate space. In the old interface all the per-view vectors are concatenated. See
  12277. #calibrateCamera for details.</dd>
  12278. <dd><code>imagePoints</code> - Vector of vectors of the projections of the calibration pattern points. In the
  12279. old interface all the per-view vectors are concatenated.</dd>
  12280. <dd><code>imageSize</code> - Image size in pixels used to initialize the principal point.</dd>
  12281. <dd><code>aspectRatio</code> - If it is zero or negative, both \(f_x\) and \(f_y\) are estimated independently.
  12282. Otherwise, \(f_x = f_y \cdot \texttt{aspectRatio}\) .
  12283. The function estimates and returns an initial camera intrinsic matrix for the camera calibration process.
  12284. Currently, the function only supports planar calibration patterns, which are patterns where each
  12285. object point has z-coordinate =0.</dd>
  12286. <dt><span class="returnLabel">Returns:</span></dt>
  12287. <dd>automatically generated</dd>
  12288. </dl>
  12289. </li>
  12290. </ul>
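<div class="block">A compact sketch of the call shape only: a single synthetic view of a tiny planar pattern with placeholder coordinates. A real run would pass several views, each with many pattern points, one MatOfPoint3f/MatOfPoint2f pair per view; java.util and org.opencv imports and a loaded native library are assumed.</div>
<pre>
// import java.util.*; import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
MatOfPoint3f object = new MatOfPoint3f(
        new Point3(0, 0, 0), new Point3(1, 0, 0),
        new Point3(0, 1, 0), new Point3(1, 1, 0));    // planar pattern: all z = 0
MatOfPoint2f image = new MatOfPoint2f(
        new Point(320, 240), new Point(420, 240),
        new Point(320, 340), new Point(420, 340));    // detected projections, placeholders
List&lt;MatOfPoint3f&gt; objectPoints = Collections.singletonList(object);
List&lt;MatOfPoint2f&gt; imagePoints = Collections.singletonList(image);
Mat K0 = Calib3d.initCameraMatrix2D(objectPoints, imagePoints, new Size(640, 480));
// K0 is only an initial guess; refine it with calibrateCamera over real views.
</pre>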
  12291. <a name="initInverseRectificationMap-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-">
  12292. <!-- -->
  12293. </a>
  12294. <ul class="blockList">
  12295. <li class="blockList">
  12296. <h4>initInverseRectificationMap</h4>
  12297. <pre>public static&nbsp;void&nbsp;initInverseRectificationMap(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12298. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  12299. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  12300. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newCameraMatrix,
  12301. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;size,
  12302. int&nbsp;m1type,
  12303. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;map1,
  12304. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;map2)</pre>
  12305. <div class="block">Computes the projection and inverse-rectification transformation map. In essense, this is the inverse of
  12306. #initUndistortRectifyMap to accomodate stereo-rectification of projectors ('inverse-cameras') in projector-camera pairs.
  12307. The function computes the joint projection and inverse rectification transformation and represents the
  12308. result in the form of maps for #remap. The projected image looks like a distorted version of the original which,
  12309. once projected by a projector, should visually match the original. In case of a monocular camera, newCameraMatrix
  12310. is usually equal to cameraMatrix, or it can be computed by
  12311. #getOptimalNewCameraMatrix for a better control over scaling. In case of a projector-camera pair,
  12312. newCameraMatrix is normally set to P1 or P2 computed by #stereoRectify .
  12313. The projector is oriented differently in the coordinate space, according to R. In case of projector-camera pairs,
  12314. this helps align the projector (in the same manner as #initUndistortRectifyMap for the camera) to create a stereo-rectified pair. This
  12315. allows epipolar lines on both images to become horizontal and have the same y-coordinate (in case of a horizontally aligned projector-camera pair).
  12316. The function builds the maps for the inverse mapping algorithm that is used by #remap. That
  12317. is, for each pixel \((u, v)\) in the destination (projected and inverse-rectified) image, the function
  12318. computes the corresponding coordinates in the source image (that is, in the original digital image). The following process is applied:
  12319. \(
  12320. \begin{array}{l}
  12321. \text{newCameraMatrix}\\
  12322. x \leftarrow (u - {c'}_x)/{f'}_x \\
  12323. y \leftarrow (v - {c'}_y)/{f'}_y \\
  12324. \\\text{Undistortion}
  12325. \\\scriptsize{\textit{though equation shown is for radial undistortion, function implements cv::undistortPoints()}}\\
  12326. r^2 \leftarrow x^2 + y^2 \\
  12327. \theta \leftarrow \frac{1 + k_1 r^2 + k_2 r^4 + k_3 r^6}{1 + k_4 r^2 + k_5 r^4 + k_6 r^6}\\
  12328. x' \leftarrow \frac{x}{\theta} \\
  12329. y' \leftarrow \frac{y}{\theta} \\
  12330. \\\text{Rectification}\\
  12331. {[X\,Y\,W]} ^T \leftarrow R*[x' \, y' \, 1]^T \\
  12332. x'' \leftarrow X/W \\
  12333. y'' \leftarrow Y/W \\
  12334. \\\text{cameraMatrix}\\
  12335. map_x(u,v) \leftarrow x'' f_x + c_x \\
  12336. map_y(u,v) \leftarrow y'' f_y + c_y
  12337. \end{array}
  12338. \)
  12339. where \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  12340. are the distortion coefficients vector distCoeffs.
  12341. In case of a stereo-rectified projector-camera pair, this function is called for the projector while #initUndistortRectifyMap is called for the camera head.
  12342. This is done after #stereoRectify, which in turn is called after #stereoCalibrate. If the projector-camera pair
  12343. is not calibrated, it is still possible to compute the rectification transformations directly from
  12344. the fundamental matrix using #stereoRectifyUncalibrated. For the projector and camera, the function computes
  12345. homography H as the rectification transformation in a pixel domain, not a rotation matrix R in 3D
  12346. space. R can be computed from H as
  12347. \(\texttt{R} = \texttt{cameraMatrix} ^{-1} \cdot \texttt{H} \cdot \texttt{cameraMatrix}\)
  12348. where cameraMatrix can be chosen arbitrarily.</div>
  12349. <dl>
  12350. <dt><span class="paramLabel">Parameters:</span></dt>
  12351. <dd><code>cameraMatrix</code> - Input camera matrix \(A=\vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .</dd>
  12352. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  12353. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  12354. of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.</dd>
  12355. <dd><code>R</code> - Optional rectification transformation in the object space (3x3 matrix). R1 or R2,
  12356. computed by #stereoRectify can be passed here. If the matrix is empty, the identity transformation
  12357. is assumed.</dd>
  12358. <dd><code>newCameraMatrix</code> - New camera matrix \(A'=\vecthreethree{f_x'}{0}{c_x'}{0}{f_y'}{c_y'}{0}{0}{1}\).</dd>
  12359. <dd><code>size</code> - Distorted image size.</dd>
  12360. <dd><code>m1type</code> - Type of the first output map. Can be CV_32FC1, CV_32FC2 or CV_16SC2, see #convertMaps</dd>
  12361. <dd><code>map1</code> - The first output map for #remap.</dd>
  12362. <dd><code>map2</code> - The second output map for #remap.</dd>
  12363. </dl>
  12364. </li>
  12365. </ul>
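<div class="block">A sketch for the projector side of a calibrated projector-camera pair. projK, projD, R2 and P2 are placeholders standing in for the values produced by #stereoCalibrate / #stereoRectify; org.opencv imports and a loaded native library are assumed.</div>
<pre>
// import org.opencv.core.*; import org.opencv.calib3d.Calib3d; import org.opencv.imgproc.Imgproc;
Mat projK = Mat.eye(3, 3, CvType.CV_64F);     // projector "intrinsics", placeholder
projK.put(0, 0, 1500.0); projK.put(1, 1, 1500.0);
projK.put(0, 2, 511.5); projK.put(1, 2, 383.5);
Mat projD = Mat.zeros(5, 1, CvType.CV_64F);   // projector distortion, placeholder
Mat R2 = Mat.eye(3, 3, CvType.CV_64F);        // stand-in for R2 from stereoRectify
Mat P2 = projK.clone();                       // stand-in for the 3x3 part of P2 from stereoRectify
Size projSize = new Size(1024, 768);
Mat map1 = new Mat(), map2 = new Mat();
Calib3d.initInverseRectificationMap(projK, projD, R2, P2, projSize,
        CvType.CV_32FC1, map1, map2);
// 'pattern' is the image to be projected; remap pre-distorts it so that it appears
// rectified once physically projected.
Mat pattern = Mat.zeros((int) projSize.height, (int) projSize.width, CvType.CV_8UC1);
Mat preDistorted = new Mat();
Imgproc.remap(pattern, preDistorted, map1, map2, Imgproc.INTER_LINEAR);
</pre>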
  12366. <a name="initUndistortRectifyMap-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-int-org.opencv.core.Mat-org.opencv.core.Mat-">
  12367. <!-- -->
  12368. </a>
  12369. <ul class="blockList">
  12370. <li class="blockList">
  12371. <h4>initUndistortRectifyMap</h4>
  12372. <pre>public static&nbsp;void&nbsp;initUndistortRectifyMap(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12373. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  12374. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  12375. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newCameraMatrix,
  12376. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;size,
  12377. int&nbsp;m1type,
  12378. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;map1,
  12379. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;map2)</pre>
  12380. <div class="block">Computes the undistortion and rectification transformation map.
  12381. The function computes the joint undistortion and rectification transformation and represents the
  12382. result in the form of maps for #remap. The undistorted image looks like original, as if it is
  12383. captured with a camera using the camera matrix =newCameraMatrix and zero distortion. In case of a
  12384. monocular camera, newCameraMatrix is usually equal to cameraMatrix, or it can be computed by
  12385. #getOptimalNewCameraMatrix for a better control over scaling. In case of a stereo camera,
  12386. newCameraMatrix is normally set to P1 or P2 computed by #stereoRectify .
  12387. Also, this new camera is oriented differently in the coordinate space, according to R. That, for
  12388. example, helps to align two heads of a stereo camera so that the epipolar lines on both images
  12389. become horizontal and have the same y- coordinate (in case of a horizontally aligned stereo camera).
  12390. The function actually builds the maps for the inverse mapping algorithm that is used by #remap. That
  12391. is, for each pixel \((u, v)\) in the destination (corrected and rectified) image, the function
  12392. computes the corresponding coordinates in the source image (that is, in the original image from
  12393. camera). The following process is applied:
  12394. \(
  12395. \begin{array}{l}
  12396. x \leftarrow (u - {c'}_x)/{f'}_x \\
  12397. y \leftarrow (v - {c'}_y)/{f'}_y \\
  12398. {[X\,Y\,W]} ^T \leftarrow R^{-1}*[x \, y \, 1]^T \\
  12399. x' \leftarrow X/W \\
  12400. y' \leftarrow Y/W \\
  12401. r^2 \leftarrow x'^2 + y'^2 \\
  12402. x'' \leftarrow x' \frac{1 + k_1 r^2 + k_2 r^4 + k_3 r^6}{1 + k_4 r^2 + k_5 r^4 + k_6 r^6}
  12403. + 2p_1 x' y' + p_2(r^2 + 2 x'^2) + s_1 r^2 + s_2 r^4\\
  12404. y'' \leftarrow y' \frac{1 + k_1 r^2 + k_2 r^4 + k_3 r^6}{1 + k_4 r^2 + k_5 r^4 + k_6 r^6}
  12405. + p_1 (r^2 + 2 y'^2) + 2 p_2 x' y' + s_3 r^2 + s_4 r^4 \\
  12406. s\vecthree{x'''}{y'''}{1} =
\vecthreethree{R_{33}(\tau_x, \tau_y)}{0}{-R_{13}(\tau_x, \tau_y)}
  12408. {0}{R_{33}(\tau_x, \tau_y)}{-R_{23}(\tau_x, \tau_y)}
  12409. {0}{0}{1} R(\tau_x, \tau_y) \vecthree{x''}{y''}{1}\\
  12410. map_x(u,v) \leftarrow x''' f_x + c_x \\
  12411. map_y(u,v) \leftarrow y''' f_y + c_y
  12412. \end{array}
  12413. \)
  12414. where \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  12415. are the distortion coefficients.
  12416. In case of a stereo camera, this function is called twice: once for each camera head, after
  12417. #stereoRectify, which in its turn is called after #stereoCalibrate. But if the stereo camera
  12418. was not calibrated, it is still possible to compute the rectification transformations directly from
  12419. the fundamental matrix using #stereoRectifyUncalibrated. For each camera, the function computes
  12420. homography H as the rectification transformation in a pixel domain, not a rotation matrix R in 3D
  12421. space. R can be computed from H as
  12422. \(\texttt{R} = \texttt{cameraMatrix} ^{-1} \cdot \texttt{H} \cdot \texttt{cameraMatrix}\)
  12423. where cameraMatrix can be chosen arbitrarily.</div>
  12424. <dl>
  12425. <dt><span class="paramLabel">Parameters:</span></dt>
  12426. <dd><code>cameraMatrix</code> - Input camera matrix \(A=\vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .</dd>
  12427. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  12428. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  12429. of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.</dd>
<dd><code>R</code> - Optional rectification transformation in the object space (3x3 matrix). R1 or R2,
computed by #stereoRectify, can be passed here. If the matrix is empty, the identity transformation
is assumed.</dd>
  12433. <dd><code>newCameraMatrix</code> - New camera matrix \(A'=\vecthreethree{f_x'}{0}{c_x'}{0}{f_y'}{c_y'}{0}{0}{1}\).</dd>
  12434. <dd><code>size</code> - Undistorted image size.</dd>
  12435. <dd><code>m1type</code> - Type of the first output map that can be CV_32FC1, CV_32FC2 or CV_16SC2, see #convertMaps</dd>
  12436. <dd><code>map1</code> - The first output map.</dd>
  12437. <dd><code>map2</code> - The second output map.</dd>
  12438. </dl>
  12439. </li>
  12440. </ul>
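<div class="block">A minimal monocular sketch: build the maps once, then remap each incoming frame. The intrinsics, distortion and frame below are placeholders; org.opencv.core / org.opencv.calib3d / org.opencv.imgproc imports and a loaded native library are assumed.</div>
<pre>
// import org.opencv.core.*; import org.opencv.calib3d.Calib3d; import org.opencv.imgproc.Imgproc;
Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64F);   // placeholder calibrated intrinsics
cameraMatrix.put(0, 0, 700.0); cameraMatrix.put(1, 1, 700.0);
cameraMatrix.put(0, 2, 319.5); cameraMatrix.put(1, 2, 239.5);
Mat distCoeffs = Mat.zeros(5, 1, CvType.CV_64F);   // k1 k2 p1 p2 k3, placeholders
Size size = new Size(640, 480);
Mat map1 = new Mat(), map2 = new Mat();
Calib3d.initUndistortRectifyMap(cameraMatrix, distCoeffs,
        new Mat(),            // R: empty = identity (monocular case)
        cameraMatrix,         // newCameraMatrix: keep the original intrinsics
        size, CvType.CV_16SC2, map1, map2);
Mat frame = Mat.zeros(480, 640, CvType.CV_8UC3);   // placeholder frame; use a real capture
Mat rectified = new Mat();
Imgproc.remap(frame, rectified, map1, map2, Imgproc.INTER_LINEAR);
</pre>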
  12441. <a name="matMulDeriv-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  12442. <!-- -->
  12443. </a>
  12444. <ul class="blockList">
  12445. <li class="blockList">
  12446. <h4>matMulDeriv</h4>
  12447. <pre>public static&nbsp;void&nbsp;matMulDeriv(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;A,
  12448. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;B,
  12449. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dABdA,
  12450. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dABdB)</pre>
  12451. <div class="block">Computes partial derivatives of the matrix product for each multiplied matrix.</div>
  12452. <dl>
  12453. <dt><span class="paramLabel">Parameters:</span></dt>
  12454. <dd><code>A</code> - First multiplied matrix.</dd>
  12455. <dd><code>B</code> - Second multiplied matrix.</dd>
  12456. <dd><code>dABdA</code> - First output derivative matrix d(A\*B)/dA of size
  12457. \(\texttt{A.rows*B.cols} \times {A.rows*A.cols}\) .</dd>
  12458. <dd><code>dABdB</code> - Second output derivative matrix d(A\*B)/dB of size
  12459. \(\texttt{A.rows*B.cols} \times {B.rows*B.cols}\) .
  12460. The function computes partial derivatives of the elements of the matrix product \(A*B\) with regard to
  12461. the elements of each of the two input matrices. The function is used to compute the Jacobian
  12462. matrices in #stereoCalibrate but can also be used in any other similar optimization function.</dd>
  12463. </dl>
  12464. </li>
  12465. </ul>
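<div class="block">A tiny sketch checking the output sizes for a 2x3 times 3x4 product; the matrix contents are arbitrary random values, and org.opencv.core / org.opencv.calib3d imports with a loaded native library are assumed.</div>
<pre>
// import org.opencv.core.*; import org.opencv.calib3d.Calib3d;
Mat A = new Mat(2, 3, CvType.CV_64F);
Mat B = new Mat(3, 4, CvType.CV_64F);
Core.randu(A, -1, 1);
Core.randu(B, -1, 1);
Mat dABdA = new Mat(), dABdB = new Mat();
Calib3d.matMulDeriv(A, B, dABdA, dABdB);
// dABdA is (A.rows*B.cols) x (A.rows*A.cols) = 8 x 6
// dABdB is (A.rows*B.cols) x (B.rows*B.cols) = 8 x 12
</pre>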
  12466. <a name="projectPoints-org.opencv.core.MatOfPoint3f-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.MatOfPoint2f-">
  12467. <!-- -->
  12468. </a>
  12469. <ul class="blockList">
  12470. <li class="blockList">
  12471. <h4>projectPoints</h4>
  12472. <pre>public static&nbsp;void&nbsp;projectPoints(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  12473. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  12474. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  12475. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12476. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  12477. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints)</pre>
  12478. <div class="block">Projects 3D points to an image plane.</div>
  12479. <dl>
  12480. <dt><span class="paramLabel">Parameters:</span></dt>
  12481. <dd><code>objectPoints</code> - Array of object points expressed wrt. the world coordinate frame. A 3xN/Nx3
  12482. 1-channel or 1xN/Nx1 3-channel (or vector&lt;Point3f&gt; ), where N is the number of points in the view.</dd>
  12483. <dd><code>rvec</code> - The rotation vector (REF: Rodrigues) that, together with tvec, performs a change of
  12484. basis from world to camera coordinate system, see REF: calibrateCamera for details.</dd>
  12485. <dd><code>tvec</code> - The translation vector, see parameter description above.</dd>
  12486. <dd><code>cameraMatrix</code> - Camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  12487. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  12488. \(\distcoeffs\) . If the vector is empty, the zero distortion coefficients are assumed.</dd>
<dd><code>imagePoints</code> - Output array of image points, 1xN/Nx1 2-channel, or
vector&lt;Point2f&gt;. This overload does not return the Jacobian; see the overloads below that also
take a <code>jacobian</code> output and, optionally, an <code>aspectRatio</code> parameter.
  12496. The function computes the 2D projections of 3D points to the image plane, given intrinsic and
extrinsic camera parameters. Optionally, the function computes Jacobians: matrices of partial
derivatives of image point coordinates (as functions of all the input parameters) with respect to
  12499. the particular parameters, intrinsic and/or extrinsic. The Jacobians are used during the global
  12500. optimization in REF: calibrateCamera, REF: solvePnP, and REF: stereoCalibrate. The function itself
  12501. can also be used to compute a re-projection error, given the current intrinsic and extrinsic
  12502. parameters.
  12503. <b>Note:</b> By setting rvec = tvec = \([0, 0, 0]\), or by setting cameraMatrix to a 3x3 identity matrix,
  12504. or by passing zero distortion coefficients, one can get various useful partial cases of the
  12505. function. This means, one can compute the distorted coordinates for a sparse set of points or apply
  12506. a perspective transformation (and also compute the derivatives) in the ideal zero-distortion setup.</dd>
  12507. </dl>
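<div class="block">A minimal sketch of this overload with hypothetical intrinsics: a point one unit in
front of the camera, with no rotation, translation, or distortion, projects to the principal point.
<code>
MatOfPoint3f objectPoints = new MatOfPoint3f(new Point3(0, 0, 1));
Mat rvec = Mat.zeros(3, 1, CvType.CV_64F);       // no rotation
Mat tvec = Mat.zeros(3, 1, CvType.CV_64F);       // no translation
Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64F); // assumed intrinsics
cameraMatrix.put(0, 0, 800); cameraMatrix.put(1, 1, 800);
cameraMatrix.put(0, 2, 320); cameraMatrix.put(1, 2, 240);
MatOfDouble distCoeffs = new MatOfDouble();      // zero distortion
MatOfPoint2f imagePoints = new MatOfPoint2f();
Calib3d.projectPoints(objectPoints, rvec, tvec, cameraMatrix, distCoeffs, imagePoints);
// imagePoints.toArray()[0] is (320, 240), i.e. the principal point.
</code></div>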
  12508. </li>
  12509. </ul>
  12510. <a name="projectPoints-org.opencv.core.MatOfPoint3f-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-">
  12511. <!-- -->
  12512. </a>
  12513. <ul class="blockList">
  12514. <li class="blockList">
  12515. <h4>projectPoints</h4>
  12516. <pre>public static&nbsp;void&nbsp;projectPoints(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  12517. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  12518. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  12519. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12520. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  12521. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
  12522. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;jacobian)</pre>
  12523. <div class="block">Projects 3D points to an image plane.</div>
  12524. <dl>
  12525. <dt><span class="paramLabel">Parameters:</span></dt>
  12526. <dd><code>objectPoints</code> - Array of object points expressed wrt. the world coordinate frame. A 3xN/Nx3
  12527. 1-channel or 1xN/Nx1 3-channel (or vector&lt;Point3f&gt; ), where N is the number of points in the view.</dd>
  12528. <dd><code>rvec</code> - The rotation vector (REF: Rodrigues) that, together with tvec, performs a change of
  12529. basis from world to camera coordinate system, see REF: calibrateCamera for details.</dd>
  12530. <dd><code>tvec</code> - The translation vector, see parameter description above.</dd>
  12531. <dd><code>cameraMatrix</code> - Camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  12532. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  12533. \(\distcoeffs\) . If the vector is empty, the zero distortion coefficients are assumed.</dd>
  12534. <dd><code>imagePoints</code> - Output array of image points, 1xN/Nx1 2-channel, or
  12535. vector&lt;Point2f&gt; .</dd>
  12536. <dd><code>jacobian</code> - Optional output 2Nx(10+&lt;numDistCoeffs&gt;) jacobian matrix of derivatives of image
  12537. points with respect to components of the rotation vector, translation vector, focal lengths,
  12538. coordinates of the principal point and the distortion coefficients. In the old interface different
components of the jacobian are returned via different output parameters. This overload does not take
the optional <code>aspectRatio</code> parameter; see the overload below for the fixed-aspect-ratio
variant, in which the jacobian is adjusted accordingly.
  12542. The function computes the 2D projections of 3D points to the image plane, given intrinsic and
extrinsic camera parameters. Optionally, the function computes Jacobians: matrices of partial
derivatives of image point coordinates (as functions of all the input parameters) with respect to
  12545. the particular parameters, intrinsic and/or extrinsic. The Jacobians are used during the global
  12546. optimization in REF: calibrateCamera, REF: solvePnP, and REF: stereoCalibrate. The function itself
  12547. can also be used to compute a re-projection error, given the current intrinsic and extrinsic
  12548. parameters.
  12549. <b>Note:</b> By setting rvec = tvec = \([0, 0, 0]\), or by setting cameraMatrix to a 3x3 identity matrix,
  12550. or by passing zero distortion coefficients, one can get various useful partial cases of the
  12551. function. This means, one can compute the distorted coordinates for a sparse set of points or apply
  12552. a perspective transformation (and also compute the derivatives) in the ideal zero-distortion setup.</dd>
  12553. </dl>
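<div class="block">A sketch of requesting the Jacobian with this overload. The inputs are assumed to be set
up as in the previous example, but with five explicit distortion coefficients so the Jacobian width is known.
<code>
MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0, 0); // k1, k2, p1, p2, k3
Mat jacobian = new Mat();
Calib3d.projectPoints(objectPoints, rvec, tvec, cameraMatrix, distCoeffs, imagePoints, jacobian);
// jacobian has 2*N rows and 10 + 5 columns:
// d(rvec) 3, d(tvec) 3, d(fx, fy) 2, d(cx, cy) 2, d(distCoeffs) 5.
</code></div>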
  12554. </li>
  12555. </ul>
  12556. <a name="projectPoints-org.opencv.core.MatOfPoint3f-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-double-">
  12557. <!-- -->
  12558. </a>
  12559. <ul class="blockList">
  12560. <li class="blockList">
  12561. <h4>projectPoints</h4>
  12562. <pre>public static&nbsp;void&nbsp;projectPoints(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  12563. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  12564. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  12565. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12566. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  12567. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
  12568. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;jacobian,
  12569. double&nbsp;aspectRatio)</pre>
  12570. <div class="block">Projects 3D points to an image plane.</div>
  12571. <dl>
  12572. <dt><span class="paramLabel">Parameters:</span></dt>
  12573. <dd><code>objectPoints</code> - Array of object points expressed wrt. the world coordinate frame. A 3xN/Nx3
  12574. 1-channel or 1xN/Nx1 3-channel (or vector&lt;Point3f&gt; ), where N is the number of points in the view.</dd>
  12575. <dd><code>rvec</code> - The rotation vector (REF: Rodrigues) that, together with tvec, performs a change of
  12576. basis from world to camera coordinate system, see REF: calibrateCamera for details.</dd>
  12577. <dd><code>tvec</code> - The translation vector, see parameter description above.</dd>
  12578. <dd><code>cameraMatrix</code> - Camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  12579. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  12580. \(\distcoeffs\) . If the vector is empty, the zero distortion coefficients are assumed.</dd>
  12581. <dd><code>imagePoints</code> - Output array of image points, 1xN/Nx1 2-channel, or
  12582. vector&lt;Point2f&gt; .</dd>
  12583. <dd><code>jacobian</code> - Optional output 2Nx(10+&lt;numDistCoeffs&gt;) jacobian matrix of derivatives of image
  12584. points with respect to components of the rotation vector, translation vector, focal lengths,
  12585. coordinates of the principal point and the distortion coefficients. In the old interface different
  12586. components of the jacobian are returned via different output parameters.</dd>
  12587. <dd><code>aspectRatio</code> - Optional "fixed aspect ratio" parameter. If the parameter is not 0, the
  12588. function assumes that the aspect ratio (\(f_x / f_y\)) is fixed and correspondingly adjusts the
  12589. jacobian matrix.
  12590. The function computes the 2D projections of 3D points to the image plane, given intrinsic and
extrinsic camera parameters. Optionally, the function computes Jacobians: matrices of partial
derivatives of image point coordinates (as functions of all the input parameters) with respect to
  12593. the particular parameters, intrinsic and/or extrinsic. The Jacobians are used during the global
  12594. optimization in REF: calibrateCamera, REF: solvePnP, and REF: stereoCalibrate. The function itself
  12595. can also be used to compute a re-projection error, given the current intrinsic and extrinsic
  12596. parameters.
  12597. <b>Note:</b> By setting rvec = tvec = \([0, 0, 0]\), or by setting cameraMatrix to a 3x3 identity matrix,
  12598. or by passing zero distortion coefficients, one can get various useful partial cases of the
  12599. function. This means, one can compute the distorted coordinates for a sparse set of points or apply
  12600. a perspective transformation (and also compute the derivatives) in the ideal zero-distortion setup.</dd>
  12601. </dl>
  12602. </li>
  12603. </ul>
  12604. <a name="recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  12605. <!-- -->
  12606. </a>
  12607. <ul class="blockList">
  12608. <li class="blockList">
  12609. <h4>recoverPose</h4>
  12610. <pre>public static&nbsp;int&nbsp;recoverPose(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  12611. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  12612. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  12613. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  12614. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t)</pre>
  12615. <dl>
  12616. <dt><span class="paramLabel">Parameters:</span></dt>
  12617. <dd><code>E</code> - The input essential matrix.</dd>
  12618. <dd><code>points1</code> - Array of N 2D points from the first image. The point coordinates should be
  12619. floating-point (single or double precision).</dd>
  12620. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  12621. <dd><code>R</code> - Output rotation matrix. Together with the translation vector, this matrix makes up a tuple
  12622. that performs a change of basis from the first camera's coordinate system to the second camera's
  12623. coordinate system. Note that, in general, t can not be used for this tuple, see the parameter
  12624. description below.</dd>
  12625. <dd><code>t</code> - Output translation vector. This vector is obtained by REF: decomposeEssentialMat and
  12626. therefore is only known up to scale, i.e. t is the direction of the translation vector and has unit
length.
This overload assumes that points1 and points2 are feature points from cameras with the same focal
length and principal point; the optional <code>focal</code>, <code>pp</code>, and <code>mask</code>
parameters of the overloads below take their default values here. Only inliers that pass the
chirality check are used to recover the pose.
This function differs from the one above in that it computes the camera intrinsic matrix from the
focal length and principal point:
  12633. \(A =
  12634. \begin{bmatrix}
  12635. f &amp; 0 &amp; x_{pp} \\
  12636. 0 &amp; f &amp; y_{pp} \\
  12637. 0 &amp; 0 &amp; 1
  12638. \end{bmatrix}\)</dd>
  12639. <dt><span class="returnLabel">Returns:</span></dt>
  12640. <dd>automatically generated</dd>
  12641. </dl>
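<div class="block">A minimal sketch of this overload. Because no intrinsics are passed, points1 and
points2 are assumed to be expressed for a camera with focal length 1 and principal point (0, 0),
e.g. normalized image coordinates; E is assumed to come from REF: findEssentialMat.
<code>
Mat R = new Mat(), t = new Mat();
int inliers = Calib3d.recoverPose(E, points1, points2, R, t);
// R and t map the first camera's coordinate system to the second camera's; t has unit length.
</code></div>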
  12642. </li>
  12643. </ul>
  12644. <a name="recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-">
  12645. <!-- -->
  12646. </a>
  12647. <ul class="blockList">
  12648. <li class="blockList">
  12649. <h4>recoverPose</h4>
  12650. <pre>public static&nbsp;int&nbsp;recoverPose(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  12651. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  12652. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  12653. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  12654. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
  12655. double&nbsp;focal)</pre>
  12656. <dl>
  12657. <dt><span class="paramLabel">Parameters:</span></dt>
  12658. <dd><code>E</code> - The input essential matrix.</dd>
  12659. <dd><code>points1</code> - Array of N 2D points from the first image. The point coordinates should be
  12660. floating-point (single or double precision).</dd>
  12661. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  12662. <dd><code>R</code> - Output rotation matrix. Together with the translation vector, this matrix makes up a tuple
  12663. that performs a change of basis from the first camera's coordinate system to the second camera's
  12664. coordinate system. Note that, in general, t can not be used for this tuple, see the parameter
  12665. description below.</dd>
  12666. <dd><code>t</code> - Output translation vector. This vector is obtained by REF: decomposeEssentialMat and
  12667. therefore is only known up to scale, i.e. t is the direction of the translation vector and has unit
  12668. length.</dd>
  12669. <dd><code>focal</code> - Focal length of the camera. Note that this function assumes that points1 and points2
are feature points from cameras with the same focal length and principal point. The optional
<code>pp</code> and <code>mask</code> parameters of the overloads below take their default values
here. Only inliers that pass the chirality check are used to recover the pose.
This function differs from the one above in that it computes the camera intrinsic matrix from the focal length and
  12674. principal point:
  12675. \(A =
  12676. \begin{bmatrix}
  12677. f &amp; 0 &amp; x_{pp} \\
  12678. 0 &amp; f &amp; y_{pp} \\
  12679. 0 &amp; 0 &amp; 1
  12680. \end{bmatrix}\)</dd>
  12681. <dt><span class="returnLabel">Returns:</span></dt>
  12682. <dd>automatically generated</dd>
  12683. </dl>
  12684. </li>
  12685. </ul>
  12686. <a name="recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-">
  12687. <!-- -->
  12688. </a>
  12689. <ul class="blockList">
  12690. <li class="blockList">
  12691. <h4>recoverPose</h4>
  12692. <pre>public static&nbsp;int&nbsp;recoverPose(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  12693. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  12694. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  12695. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  12696. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
  12697. double&nbsp;focal,
  12698. <a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp)</pre>
  12699. <dl>
  12700. <dt><span class="paramLabel">Parameters:</span></dt>
  12701. <dd><code>E</code> - The input essential matrix.</dd>
  12702. <dd><code>points1</code> - Array of N 2D points from the first image. The point coordinates should be
  12703. floating-point (single or double precision).</dd>
  12704. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  12705. <dd><code>R</code> - Output rotation matrix. Together with the translation vector, this matrix makes up a tuple
  12706. that performs a change of basis from the first camera's coordinate system to the second camera's
  12707. coordinate system. Note that, in general, t can not be used for this tuple, see the parameter
  12708. description below.</dd>
  12709. <dd><code>t</code> - Output translation vector. This vector is obtained by REF: decomposeEssentialMat and
  12710. therefore is only known up to scale, i.e. t is the direction of the translation vector and has unit
  12711. length.</dd>
  12712. <dd><code>focal</code> - Focal length of the camera. Note that this function assumes that points1 and points2
are feature points from cameras with the same focal length and principal point.</dd>
<dd><code>pp</code> - principal point of the camera. The optional <code>mask</code> parameter of the
overload below takes its default value here; only inliers that pass the chirality check are used to
recover the pose.
This function differs from the one above in that it computes the camera intrinsic matrix from the focal length and
  12718. principal point:
  12719. \(A =
  12720. \begin{bmatrix}
  12721. f &amp; 0 &amp; x_{pp} \\
  12722. 0 &amp; f &amp; y_{pp} \\
  12723. 0 &amp; 0 &amp; 1
  12724. \end{bmatrix}\)</dd>
  12725. <dt><span class="returnLabel">Returns:</span></dt>
  12726. <dd>automatically generated</dd>
  12727. </dl>
  12728. </li>
  12729. </ul>
  12730. <a name="recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Point-org.opencv.core.Mat-">
  12731. <!-- -->
  12732. </a>
  12733. <ul class="blockList">
  12734. <li class="blockList">
  12735. <h4>recoverPose</h4>
  12736. <pre>public static&nbsp;int&nbsp;recoverPose(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  12737. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  12738. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  12739. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  12740. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
  12741. double&nbsp;focal,
  12742. <a href="../../../org/opencv/core/Point.html" title="class in org.opencv.core">Point</a>&nbsp;pp,
  12743. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</pre>
  12744. <dl>
  12745. <dt><span class="paramLabel">Parameters:</span></dt>
  12746. <dd><code>E</code> - The input essential matrix.</dd>
  12747. <dd><code>points1</code> - Array of N 2D points from the first image. The point coordinates should be
  12748. floating-point (single or double precision).</dd>
  12749. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  12750. <dd><code>R</code> - Output rotation matrix. Together with the translation vector, this matrix makes up a tuple
  12751. that performs a change of basis from the first camera's coordinate system to the second camera's
  12752. coordinate system. Note that, in general, t can not be used for this tuple, see the parameter
  12753. description below.</dd>
  12754. <dd><code>t</code> - Output translation vector. This vector is obtained by REF: decomposeEssentialMat and
  12755. therefore is only known up to scale, i.e. t is the direction of the translation vector and has unit
  12756. length.</dd>
  12757. <dd><code>focal</code> - Focal length of the camera. Note that this function assumes that points1 and points2
are feature points from cameras with the same focal length and principal point.</dd>
  12759. <dd><code>pp</code> - principal point of the camera.</dd>
  12760. <dd><code>mask</code> - Input/output mask for inliers in points1 and points2. If it is not empty, then it marks
  12761. inliers in points1 and points2 for the given essential matrix E. Only these inliers will be used to
recover pose. In the output mask only inliers which pass the chirality check are kept.
This function differs from the one above in that it computes the camera intrinsic matrix from the focal length and
  12764. principal point:
  12765. \(A =
  12766. \begin{bmatrix}
  12767. f &amp; 0 &amp; x_{pp} \\
  12768. 0 &amp; f &amp; y_{pp} \\
  12769. 0 &amp; 0 &amp; 1
  12770. \end{bmatrix}\)</dd>
  12771. <dt><span class="returnLabel">Returns:</span></dt>
  12772. <dd>automatically generated</dd>
  12773. </dl>
  12774. </li>
  12775. </ul>
  12776. <a name="recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  12777. <!-- -->
  12778. </a>
  12779. <ul class="blockList">
  12780. <li class="blockList">
  12781. <h4>recoverPose</h4>
  12782. <pre>public static&nbsp;int&nbsp;recoverPose(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  12783. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  12784. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  12785. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12786. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  12787. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t)</pre>
  12788. <div class="block">Recovers the relative camera rotation and the translation from an estimated essential
  12789. matrix and the corresponding points in two images, using chirality check. Returns the number of
  12790. inliers that pass the check.</div>
  12791. <dl>
  12792. <dt><span class="paramLabel">Parameters:</span></dt>
  12793. <dd><code>E</code> - The input essential matrix.</dd>
  12794. <dd><code>points1</code> - Array of N 2D points from the first image. The point coordinates should be
  12795. floating-point (single or double precision).</dd>
  12796. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  12797. <dd><code>cameraMatrix</code> - Camera intrinsic matrix \(\cameramatrix{A}\) .
  12798. Note that this function assumes that points1 and points2 are feature points from cameras with the
  12799. same camera intrinsic matrix.</dd>
  12800. <dd><code>R</code> - Output rotation matrix. Together with the translation vector, this matrix makes up a tuple
  12801. that performs a change of basis from the first camera's coordinate system to the second camera's
  12802. coordinate system. Note that, in general, t can not be used for this tuple, see the parameter
  12803. described below.</dd>
  12804. <dd><code>t</code> - Output translation vector. This vector is obtained by REF: decomposeEssentialMat and
  12805. therefore is only known up to scale, i.e. t is the direction of the translation vector and has unit
  12806. length.
Only inliers that pass the chirality check are used to recover the pose; see the overload with the
<code>mask</code> parameter for obtaining the inlier mask.
  12809. This function decomposes an essential matrix using REF: decomposeEssentialMat and then verifies
  12810. possible pose hypotheses by doing chirality check. The chirality check means that the
  12811. triangulated 3D points should have positive depth. Some details can be found in CITE: Nister03.
  12812. This function can be used to process the output E and mask from REF: findEssentialMat. In this
  12813. scenario, points1 and points2 are the same input for #findEssentialMat :
  12814. <code>
  12815. // Example. Estimation of fundamental matrix using the RANSAC algorithm
  12816. int point_count = 100;
  12817. vector&lt;Point2f&gt; points1(point_count);
  12818. vector&lt;Point2f&gt; points2(point_count);
  12819. // initialize the points here ...
  12820. for( int i = 0; i &lt; point_count; i++ )
  12821. {
  12822. points1[i] = ...;
  12823. points2[i] = ...;
  12824. }
// camera matrix with both focal lengths = 1, and principal point = (0, 0)
  12826. Mat cameraMatrix = Mat::eye(3, 3, CV_64F);
  12827. Mat E, R, t, mask;
  12828. E = findEssentialMat(points1, points2, cameraMatrix, RANSAC, 0.999, 1.0, mask);
  12829. recoverPose(E, points1, points2, cameraMatrix, R, t, mask);
  12830. </code></dd>
  12831. <dt><span class="returnLabel">Returns:</span></dt>
  12832. <dd>automatically generated</dd>
  12833. </dl>
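<div class="block">The same workflow as the C++ snippet above, sketched with the Java bindings. The matched
point sets are assumed to be filled in beforehand, and the full-parameter <code>findEssentialMat</code>
overload used here is assumed to be available in these bindings.
<code>
MatOfPoint2f points1 = new MatOfPoint2f(); // fill with matched points ...
MatOfPoint2f points2 = new MatOfPoint2f();
// camera matrix with both focal lengths = 1, and principal point = (0, 0)
Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64F);
Mat mask = new Mat();
Mat E = Calib3d.findEssentialMat(points1, points2, cameraMatrix, Calib3d.RANSAC, 0.999, 1.0, mask);
Mat R = new Mat(), t = new Mat();
int inliers = Calib3d.recoverPose(E, points1, points2, cameraMatrix, R, t, mask);
</code></div>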
  12834. </li>
  12835. </ul>
  12836. <a name="recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-">
  12837. <!-- -->
  12838. </a>
  12839. <ul class="blockList">
  12840. <li class="blockList">
  12841. <h4>recoverPose</h4>
  12842. <pre>public static&nbsp;int&nbsp;recoverPose(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  12843. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  12844. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  12845. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12846. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  12847. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
  12848. double&nbsp;distanceThresh)</pre>
  12849. <dl>
  12850. <dt><span class="paramLabel">Parameters:</span></dt>
  12851. <dd><code>E</code> - The input essential matrix.</dd>
  12852. <dd><code>points1</code> - Array of N 2D points from the first image. The point coordinates should be
  12853. floating-point (single or double precision).</dd>
  12854. <dd><code>points2</code> - Array of the second image points of the same size and format as points1.</dd>
  12855. <dd><code>cameraMatrix</code> - Camera intrinsic matrix \(\cameramatrix{A}\) .
  12856. Note that this function assumes that points1 and points2 are feature points from cameras with the
  12857. same camera intrinsic matrix.</dd>
  12858. <dd><code>R</code> - Output rotation matrix. Together with the translation vector, this matrix makes up a tuple
  12859. that performs a change of basis from the first camera's coordinate system to the second camera's
  12860. coordinate system. Note that, in general, t can not be used for this tuple, see the parameter
  12861. description below.</dd>
  12862. <dd><code>t</code> - Output translation vector. This vector is obtained by REF: decomposeEssentialMat and
  12863. therefore is only known up to scale, i.e. t is the direction of the translation vector and has unit
  12864. length.</dd>
  12865. <dd><code>distanceThresh</code> - threshold distance which is used to filter out far away points (i.e. infinite
  12866. points).
Only inliers that pass the chirality check are used to recover the pose; see the overload with the
<code>mask</code> parameter for obtaining the inlier mask.
This function differs from the one above in that it outputs the triangulated 3D points that are used for
the chirality check.</dd>
  12871. <dt><span class="returnLabel">Returns:</span></dt>
  12872. <dd>automatically generated</dd>
  12873. </dl>
  12874. </li>
  12875. </ul>
  12876. <a name="recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Mat-">
  12877. <!-- -->
  12878. </a>
  12879. <ul class="blockList">
  12880. <li class="blockList">
  12881. <h4>recoverPose</h4>
  12882. <pre>public static&nbsp;int&nbsp;recoverPose(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  12883. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  12884. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  12885. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12886. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  12887. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
  12888. double&nbsp;distanceThresh,
  12889. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</pre>
  12890. <dl>
  12891. <dt><span class="paramLabel">Parameters:</span></dt>
  12892. <dd><code>E</code> - The input essential matrix.</dd>
  12893. <dd><code>points1</code> - Array of N 2D points from the first image. The point coordinates should be
  12894. floating-point (single or double precision).</dd>
  12895. <dd><code>points2</code> - Array of the second image points of the same size and format as points1.</dd>
  12896. <dd><code>cameraMatrix</code> - Camera intrinsic matrix \(\cameramatrix{A}\) .
  12897. Note that this function assumes that points1 and points2 are feature points from cameras with the
  12898. same camera intrinsic matrix.</dd>
  12899. <dd><code>R</code> - Output rotation matrix. Together with the translation vector, this matrix makes up a tuple
  12900. that performs a change of basis from the first camera's coordinate system to the second camera's
  12901. coordinate system. Note that, in general, t can not be used for this tuple, see the parameter
  12902. description below.</dd>
  12903. <dd><code>t</code> - Output translation vector. This vector is obtained by REF: decomposeEssentialMat and
  12904. therefore is only known up to scale, i.e. t is the direction of the translation vector and has unit
  12905. length.</dd>
  12906. <dd><code>distanceThresh</code> - threshold distance which is used to filter out far away points (i.e. infinite
  12907. points).</dd>
  12908. <dd><code>mask</code> - Input/output mask for inliers in points1 and points2. If it is not empty, then it marks
  12909. inliers in points1 and points2 for the given essential matrix E. Only these inliers will be used to
recover pose. In the output mask only inliers which pass the chirality check are kept.
This function differs from the one above in that it outputs the triangulated 3D points that are used for
  12912. the chirality check.</dd>
  12913. <dt><span class="returnLabel">Returns:</span></dt>
  12914. <dd>automatically generated</dd>
  12915. </dl>
  12916. </li>
  12917. </ul>
  12918. <a name="recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Mat-org.opencv.core.Mat-">
  12919. <!-- -->
  12920. </a>
  12921. <ul class="blockList">
  12922. <li class="blockList">
  12923. <h4>recoverPose</h4>
  12924. <pre>public static&nbsp;int&nbsp;recoverPose(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  12925. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  12926. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  12927. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12928. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  12929. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
  12930. double&nbsp;distanceThresh,
  12931. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask,
  12932. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;triangulatedPoints)</pre>
  12933. <dl>
  12934. <dt><span class="paramLabel">Parameters:</span></dt>
  12935. <dd><code>E</code> - The input essential matrix.</dd>
  12936. <dd><code>points1</code> - Array of N 2D points from the first image. The point coordinates should be
  12937. floating-point (single or double precision).</dd>
  12938. <dd><code>points2</code> - Array of the second image points of the same size and format as points1.</dd>
  12939. <dd><code>cameraMatrix</code> - Camera intrinsic matrix \(\cameramatrix{A}\) .
  12940. Note that this function assumes that points1 and points2 are feature points from cameras with the
  12941. same camera intrinsic matrix.</dd>
  12942. <dd><code>R</code> - Output rotation matrix. Together with the translation vector, this matrix makes up a tuple
  12943. that performs a change of basis from the first camera's coordinate system to the second camera's
  12944. coordinate system. Note that, in general, t can not be used for this tuple, see the parameter
  12945. description below.</dd>
  12946. <dd><code>t</code> - Output translation vector. This vector is obtained by REF: decomposeEssentialMat and
  12947. therefore is only known up to scale, i.e. t is the direction of the translation vector and has unit
  12948. length.</dd>
  12949. <dd><code>distanceThresh</code> - threshold distance which is used to filter out far away points (i.e. infinite
  12950. points).</dd>
  12951. <dd><code>mask</code> - Input/output mask for inliers in points1 and points2. If it is not empty, then it marks
  12952. inliers in points1 and points2 for the given essential matrix E. Only these inliers will be used to
recover pose. In the output mask only inliers which pass the chirality check are kept.</dd>
<dd><code>triangulatedPoints</code> - 3D points which were reconstructed by triangulation.
This function differs from the one above in that it outputs the triangulated 3D points that are used for
  12956. the chirality check.</dd>
  12957. <dt><span class="returnLabel">Returns:</span></dt>
  12958. <dd>automatically generated</dd>
  12959. </dl>
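<div class="block">A sketch of retrieving the triangulated points with this overload (E, the point sets,
and cameraMatrix as in the previous examples; the distance threshold of 50 is an arbitrary choice):
<code>
Mat R = new Mat(), t = new Mat();
Mat mask = new Mat(), triangulatedPoints = new Mat();
int inliers = Calib3d.recoverPose(E, points1, points2, cameraMatrix, R, t,
        50.0, mask, triangulatedPoints);
// triangulatedPoints holds the reconstructed 3D points used for the chirality check;
// points farther than the threshold are filtered out via the mask.
</code></div>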
  12960. </li>
  12961. </ul>
  12962. <a name="recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  12963. <!-- -->
  12964. </a>
  12965. <ul class="blockList">
  12966. <li class="blockList">
  12967. <h4>recoverPose</h4>
  12968. <pre>public static&nbsp;int&nbsp;recoverPose(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  12969. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  12970. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  12971. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  12972. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  12973. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
  12974. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</pre>
  12975. <div class="block">Recovers the relative camera rotation and the translation from an estimated essential
  12976. matrix and the corresponding points in two images, using chirality check. Returns the number of
  12977. inliers that pass the check.</div>
  12978. <dl>
  12979. <dt><span class="paramLabel">Parameters:</span></dt>
  12980. <dd><code>E</code> - The input essential matrix.</dd>
  12981. <dd><code>points1</code> - Array of N 2D points from the first image. The point coordinates should be
  12982. floating-point (single or double precision).</dd>
  12983. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  12984. <dd><code>cameraMatrix</code> - Camera intrinsic matrix \(\cameramatrix{A}\) .
  12985. Note that this function assumes that points1 and points2 are feature points from cameras with the
  12986. same camera intrinsic matrix.</dd>
  12987. <dd><code>R</code> - Output rotation matrix. Together with the translation vector, this matrix makes up a tuple
  12988. that performs a change of basis from the first camera's coordinate system to the second camera's
  12989. coordinate system. Note that, in general, t can not be used for this tuple, see the parameter
  12990. described below.</dd>
  12991. <dd><code>t</code> - Output translation vector. This vector is obtained by REF: decomposeEssentialMat and
  12992. therefore is only known up to scale, i.e. t is the direction of the translation vector and has unit
  12993. length.</dd>
  12994. <dd><code>mask</code> - Input/output mask for inliers in points1 and points2. If it is not empty, then it marks
  12995. inliers in points1 and points2 for the given essential matrix E. Only these inliers will be used to
recover pose. In the output mask only inliers which pass the chirality check are kept.
  12997. This function decomposes an essential matrix using REF: decomposeEssentialMat and then verifies
  12998. possible pose hypotheses by doing chirality check. The chirality check means that the
  12999. triangulated 3D points should have positive depth. Some details can be found in CITE: Nister03.
  13000. This function can be used to process the output E and mask from REF: findEssentialMat. In this
  13001. scenario, points1 and points2 are the same input for #findEssentialMat :
  13002. <code>
  13003. // Example. Estimation of fundamental matrix using the RANSAC algorithm
  13004. int point_count = 100;
  13005. vector&lt;Point2f&gt; points1(point_count);
  13006. vector&lt;Point2f&gt; points2(point_count);
  13007. // initialize the points here ...
  13008. for( int i = 0; i &lt; point_count; i++ )
  13009. {
  13010. points1[i] = ...;
  13011. points2[i] = ...;
  13012. }
// camera matrix with both focal lengths = 1, and principal point = (0, 0)
  13014. Mat cameraMatrix = Mat::eye(3, 3, CV_64F);
  13015. Mat E, R, t, mask;
  13016. E = findEssentialMat(points1, points2, cameraMatrix, RANSAC, 0.999, 1.0, mask);
  13017. recoverPose(E, points1, points2, cameraMatrix, R, t, mask);
  13018. </code></dd>
  13019. <dt><span class="returnLabel">Returns:</span></dt>
  13020. <dd>automatically generated</dd>
  13021. </dl>
  13022. </li>
  13023. </ul>
  13024. <a name="recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  13025. <!-- -->
  13026. </a>
  13027. <ul class="blockList">
  13028. <li class="blockList">
  13029. <h4>recoverPose</h4>
  13030. <pre>public static&nbsp;int&nbsp;recoverPose(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  13031. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  13032. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  13033. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  13034. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  13035. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  13036. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  13037. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  13038. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t)</pre>
  13039. <div class="block">Recovers the relative camera rotation and the translation from corresponding points in two images from two different cameras, using cheirality check. Returns the number of
  13040. inliers that pass the check.</div>
  13041. <dl>
  13042. <dt><span class="paramLabel">Parameters:</span></dt>
  13043. <dd><code>points1</code> - Array of N 2D points from the first image. The point coordinates should be
  13044. floating-point (single or double precision).</dd>
  13045. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  13046. <dd><code>cameraMatrix1</code> - Input/output camera matrix for the first camera, the same as in
  13047. REF: calibrateCamera. Furthermore, for the stereo case, additional flags may be used, see below.</dd>
  13048. <dd><code>distCoeffs1</code> - Input/output vector of distortion coefficients, the same as in
  13049. REF: calibrateCamera.</dd>
<dd><code>cameraMatrix2</code> - Input/output camera matrix for the second camera, the same as in
  13051. REF: calibrateCamera. Furthermore, for the stereo case, additional flags may be used, see below.</dd>
  13052. <dd><code>distCoeffs2</code> - Input/output vector of distortion coefficients, the same as in
  13053. REF: calibrateCamera.</dd>
  13054. <dd><code>E</code> - The output essential matrix.</dd>
  13055. <dd><code>R</code> - Output rotation matrix. Together with the translation vector, this matrix makes up a tuple
  13056. that performs a change of basis from the first camera's coordinate system to the second camera's
  13057. coordinate system. Note that, in general, t can not be used for this tuple, see the parameter
  13058. described below.</dd>
  13059. <dd><code>t</code> - Output translation vector. This vector is obtained by REF: decomposeEssentialMat and
  13060. therefore is only known up to scale, i.e. t is the direction of the translation vector and has unit
  13061. length.
The optional <code>method</code>, <code>prob</code>, <code>threshold</code>, and <code>mask</code>
parameters of the overloads below take their default values here (the essential matrix is estimated
with the REF: RANSAC method). Only inliers that pass the cheirality check are used to recover the pose.
  13076. This function decomposes an essential matrix using REF: decomposeEssentialMat and then verifies
  13077. possible pose hypotheses by doing cheirality check. The cheirality check means that the
  13078. triangulated 3D points should have positive depth. Some details can be found in CITE: Nister03.
  13079. This function can be used to process the output E and mask from REF: findEssentialMat. In this
scenario, points1 and points2 are the same input as for findEssentialMat:
  13081. <code>
  13082. // Example. Estimation of fundamental matrix using the RANSAC algorithm
  13083. int point_count = 100;
  13084. vector&lt;Point2f&gt; points1(point_count);
  13085. vector&lt;Point2f&gt; points2(point_count);
  13086. // initialize the points here ...
  13087. for( int i = 0; i &lt; point_count; i++ )
  13088. {
  13089. points1[i] = ...;
  13090. points2[i] = ...;
  13091. }
  13092. // Input: camera calibration of both cameras, for example using intrinsic chessboard calibration.
  13093. Mat cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2;
  13094. // Output: Essential matrix, relative rotation and relative translation.
  13095. Mat E, R, t, mask;
  13096. recoverPose(points1, points2, cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2, E, R, t, mask);
  13097. </code></dd>
  13098. <dt><span class="returnLabel">Returns:</span></dt>
  13099. <dd>automatically generated</dd>
  13100. </dl>
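<div class="block">A sketch of this calibrated two-camera variant using the Java bindings; the camera
matrices and distortion coefficients are assumed to come from a prior per-camera calibration:
<code>
Mat E = new Mat(), R = new Mat(), t = new Mat();
int inliers = Calib3d.recoverPose(points1, points2, cameraMatrix1, distCoeffs1,
        cameraMatrix2, distCoeffs2, E, R, t);
// R and t map the first camera's coordinate system to the second camera's; t has unit length.
</code></div>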
  13101. </li>
  13102. </ul>
  13103. <a name="recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">
  13104. <!-- -->
  13105. </a>
  13106. <ul class="blockList">
  13107. <li class="blockList">
  13108. <h4>recoverPose</h4>
  13109. <pre>public static&nbsp;int&nbsp;recoverPose(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  13110. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  13111. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  13112. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  13113. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  13114. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  13115. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  13116. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  13117. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
  13118. int&nbsp;method)</pre>
  13119. <div class="block">Recovers the relative camera rotation and the translation from corresponding points in two images from two different cameras, using cheirality check. Returns the number of
  13120. inliers that pass the check.</div>
  13121. <dl>
  13122. <dt><span class="paramLabel">Parameters:</span></dt>
  13123. <dd><code>points1</code> - Array of N 2D points from the first image. The point coordinates should be
  13124. floating-point (single or double precision).</dd>
  13125. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  13126. <dd><code>cameraMatrix1</code> - Input/output camera matrix for the first camera, the same as in
  13127. REF: calibrateCamera. Furthermore, for the stereo case, additional flags may be used, see below.</dd>
  13128. <dd><code>distCoeffs1</code> - Input/output vector of distortion coefficients, the same as in
  13129. REF: calibrateCamera.</dd>
<dd><code>cameraMatrix2</code> - Input/output camera matrix for the second camera, the same as in
  13131. REF: calibrateCamera. Furthermore, for the stereo case, additional flags may be used, see below.</dd>
  13132. <dd><code>distCoeffs2</code> - Input/output vector of distortion coefficients, the same as in
  13133. REF: calibrateCamera.</dd>
  13134. <dd><code>E</code> - The output essential matrix.</dd>
  13135. <dd><code>R</code> - Output rotation matrix. Together with the translation vector, this matrix makes up a tuple
  13136. that performs a change of basis from the first camera's coordinate system to the second camera's
  13137. coordinate system. Note that, in general, t can not be used for this tuple, see the parameter
  13138. described below.</dd>
  13139. <dd><code>t</code> - Output translation vector. This vector is obtained by REF: decomposeEssentialMat and
  13140. therefore is only known up to scale, i.e. t is the direction of the translation vector and has unit
  13141. length.</dd>
  13142. <dd><code>method</code> - Method for computing an essential matrix.
  13143. <ul>
  13144. <li>
  13145. REF: RANSAC for the RANSAC algorithm.
  13146. </li>
  13147. <li>
  13148. REF: LMEDS for the LMedS algorithm.
  13149. </li>
  13150. </ul>
The optional <code>prob</code>, <code>threshold</code>, and <code>mask</code> parameters of the
overloads below take their default values here. Only inliers that pass the cheirality check are used
to recover the pose.
  13157. This function decomposes an essential matrix using REF: decomposeEssentialMat and then verifies
  13158. possible pose hypotheses by doing cheirality check. The cheirality check means that the
  13159. triangulated 3D points should have positive depth. Some details can be found in CITE: Nister03.
  13160. This function can be used to process the output E and mask from REF: findEssentialMat. In this
scenario, points1 and points2 are the same input as for findEssentialMat:
  13162. <code>
  13163. // Example. Estimation of fundamental matrix using the RANSAC algorithm
  13164. int point_count = 100;
  13165. vector&lt;Point2f&gt; points1(point_count);
  13166. vector&lt;Point2f&gt; points2(point_count);
  13167. // initialize the points here ...
  13168. for( int i = 0; i &lt; point_count; i++ )
  13169. {
  13170. points1[i] = ...;
  13171. points2[i] = ...;
  13172. }
  13173. // Input: camera calibration of both cameras, for example using intrinsic chessboard calibration.
  13174. Mat cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2;
  13175. // Output: Essential matrix, relative rotation and relative translation.
  13176. Mat E, R, t, mask;
  13177. recoverPose(points1, points2, cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2, E, R, t, mask);
  13178. </code></dd>
  13179. <dt><span class="returnLabel">Returns:</span></dt>
  13180. <dd>automatically generated</dd>
  13181. </dl>
  13182. </li>
  13183. </ul>
  13184. <a name="recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-">
  13185. <!-- -->
  13186. </a>
  13187. <ul class="blockList">
  13188. <li class="blockList">
  13189. <h4>recoverPose</h4>
  13190. <pre>public static&nbsp;int&nbsp;recoverPose(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  13191. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  13192. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  13193. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  13194. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  13195. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  13196. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  13197. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  13198. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
  13199. int&nbsp;method,
  13200. double&nbsp;prob)</pre>
  13201. <div class="block">Recovers the relative camera rotation and the translation from corresponding points in two images from two different cameras, using cheirality check. Returns the number of
  13202. inliers that pass the check.</div>
  13203. <dl>
  13204. <dt><span class="paramLabel">Parameters:</span></dt>
  13205. <dd><code>points1</code> - Array of N 2D points from the first image. The point coordinates should be
  13206. floating-point (single or double precision).</dd>
  13207. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  13208. <dd><code>cameraMatrix1</code> - Input/output camera matrix for the first camera, the same as in
  13209. REF: calibrateCamera. Furthermore, for the stereo case, additional flags may be used, see below.</dd>
  13210. <dd><code>distCoeffs1</code> - Input/output vector of distortion coefficients, the same as in
  13211. REF: calibrateCamera.</dd>
<dd><code>cameraMatrix2</code> - Input/output camera matrix for the second camera, the same as in
  13213. REF: calibrateCamera. Furthermore, for the stereo case, additional flags may be used, see below.</dd>
  13214. <dd><code>distCoeffs2</code> - Input/output vector of distortion coefficients, the same as in
  13215. REF: calibrateCamera.</dd>
  13216. <dd><code>E</code> - The output essential matrix.</dd>
  13217. <dd><code>R</code> - Output rotation matrix. Together with the translation vector, this matrix makes up a tuple
  13218. that performs a change of basis from the first camera's coordinate system to the second camera's
  13219. coordinate system. Note that, in general, t can not be used for this tuple, see the parameter
  13220. described below.</dd>
  13221. <dd><code>t</code> - Output translation vector. This vector is obtained by REF: decomposeEssentialMat and
  13222. therefore is only known up to scale, i.e. t is the direction of the translation vector and has unit
  13223. length.</dd>
  13224. <dd><code>method</code> - Method for computing an essential matrix.
  13225. <ul>
  13226. <li>
  13227. REF: RANSAC for the RANSAC algorithm.
  13228. </li>
  13229. <li>
  13230. REF: LMEDS for the LMedS algorithm.
  13231. </li>
  13232. </ul></dd>
  13233. <dd><code>prob</code> - Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
  13234. confidence (probability) that the estimated matrix is correct.
The optional <code>threshold</code> and <code>mask</code> parameters of the overloads below take
their default values here. Only inliers that pass the cheirality check are used to recover the pose.
  13240. This function decomposes an essential matrix using REF: decomposeEssentialMat and then verifies
  13241. possible pose hypotheses by doing cheirality check. The cheirality check means that the
  13242. triangulated 3D points should have positive depth. Some details can be found in CITE: Nister03.
  13243. This function can be used to process the output E and mask from REF: findEssentialMat. In this
scenario, points1 and points2 are the same input as for findEssentialMat:
  13245. <code>
  13246. // Example. Estimation of fundamental matrix using the RANSAC algorithm
  13247. int point_count = 100;
  13248. vector&lt;Point2f&gt; points1(point_count);
  13249. vector&lt;Point2f&gt; points2(point_count);
  13250. // initialize the points here ...
  13251. for( int i = 0; i &lt; point_count; i++ )
  13252. {
  13253. points1[i] = ...;
  13254. points2[i] = ...;
  13255. }
  13256. // Input: camera calibration of both cameras, for example using intrinsic chessboard calibration.
  13257. Mat cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2;
  13258. // Output: Essential matrix, relative rotation and relative translation.
  13259. Mat E, R, t, mask;
  13260. recoverPose(points1, points2, cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2, E, R, t, mask);
  13261. </code></dd>
  13262. <dt><span class="returnLabel">Returns:</span></dt>
  13263. <dd>automatically generated</dd>
  13264. </dl>
  13265. </li>
  13266. </ul>
  13267. <a name="recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-double-">
  13268. <!-- -->
  13269. </a>
  13270. <ul class="blockList">
  13271. <li class="blockList">
  13272. <h4>recoverPose</h4>
  13273. <pre>public static&nbsp;int&nbsp;recoverPose(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  13274. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  13275. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  13276. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  13277. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  13278. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  13279. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  13280. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  13281. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
  13282. int&nbsp;method,
  13283. double&nbsp;prob,
  13284. double&nbsp;threshold)</pre>
  13285. <div class="block">Recovers the relative camera rotation and the translation from corresponding points in two images from two different cameras, using cheirality check. Returns the number of
  13286. inliers that pass the check.</div>
  13287. <dl>
  13288. <dt><span class="paramLabel">Parameters:</span></dt>
  13289. <dd><code>points1</code> - Array of N 2D points from the first image. The point coordinates should be
  13290. floating-point (single or double precision).</dd>
  13291. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  13292. <dd><code>cameraMatrix1</code> - Input/output camera matrix for the first camera, the same as in
  13293. REF: calibrateCamera. Furthermore, for the stereo case, additional flags may be used, see below.</dd>
  13294. <dd><code>distCoeffs1</code> - Input/output vector of distortion coefficients, the same as in
  13295. REF: calibrateCamera.</dd>
<dd><code>cameraMatrix2</code> - Input/output camera matrix for the second camera, the same as in
REF: calibrateCamera. Furthermore, for the stereo case, additional flags may be used, see below.</dd>
  13298. <dd><code>distCoeffs2</code> - Input/output vector of distortion coefficients, the same as in
  13299. REF: calibrateCamera.</dd>
  13300. <dd><code>E</code> - The output essential matrix.</dd>
  13301. <dd><code>R</code> - Output rotation matrix. Together with the translation vector, this matrix makes up a tuple
  13302. that performs a change of basis from the first camera's coordinate system to the second camera's
  13303. coordinate system. Note that, in general, t can not be used for this tuple, see the parameter
  13304. described below.</dd>
  13305. <dd><code>t</code> - Output translation vector. This vector is obtained by REF: decomposeEssentialMat and
  13306. therefore is only known up to scale, i.e. t is the direction of the translation vector and has unit
  13307. length.</dd>
  13308. <dd><code>method</code> - Method for computing an essential matrix.
  13309. <ul>
  13310. <li>
  13311. REF: RANSAC for the RANSAC algorithm.
  13312. </li>
  13313. <li>
  13314. REF: LMEDS for the LMedS algorithm.
  13315. </li>
  13316. </ul></dd>
  13317. <dd><code>prob</code> - Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
  13318. confidence (probability) that the estimated matrix is correct.</dd>
<dd><code>threshold</code> - Parameter used for RANSAC. It is the maximum distance from a point to an epipolar
line in pixels, beyond which the point is considered an outlier and is not used for computing the
final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
point localization, image resolution, and the image noise.
An optional mask (see the overload below) marks inliers in points1 and points2 for the given
essential matrix E. Only these inliers are used to recover pose, and in the output mask only
inliers which pass the cheirality check are kept.
This function decomposes an essential matrix using REF: decomposeEssentialMat and then verifies
possible pose hypotheses by doing a cheirality check. The cheirality check means that the
triangulated 3D points should have positive depth. Some details can be found in CITE: Nister03.
This function can be used to process the output E and mask from REF: findEssentialMat. In this
scenario, points1 and points2 are the same input as for findEssentialMat:
  13330. <code>
// Example. Estimation of the essential matrix and relative pose from two calibrated views
  13332. int point_count = 100;
  13333. vector&lt;Point2f&gt; points1(point_count);
  13334. vector&lt;Point2f&gt; points2(point_count);
  13335. // initialize the points here ...
  13336. for( int i = 0; i &lt; point_count; i++ )
  13337. {
  13338. points1[i] = ...;
  13339. points2[i] = ...;
  13340. }
  13341. // Input: camera calibration of both cameras, for example using intrinsic chessboard calibration.
  13342. Mat cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2;
  13343. // Output: Essential matrix, relative rotation and relative translation.
  13344. Mat E, R, t, mask;
  13345. recoverPose(points1, points2, cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2, E, R, t, mask);
  13346. </code></dd>
  13347. <dt><span class="returnLabel">Returns:</span></dt>
  13348. <dd>automatically generated</dd>
  13349. </dl>
  13350. </li>
  13351. </ul>
  13352. <a name="recoverPose-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-double-org.opencv.core.Mat-">
  13353. <!-- -->
  13354. </a>
  13355. <ul class="blockList">
  13356. <li class="blockList">
  13357. <h4>recoverPose</h4>
  13358. <pre>public static&nbsp;int&nbsp;recoverPose(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  13359. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  13360. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  13361. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  13362. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  13363. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  13364. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  13365. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  13366. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;t,
  13367. int&nbsp;method,
  13368. double&nbsp;prob,
  13369. double&nbsp;threshold,
  13370. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mask)</pre>
  13371. <div class="block">Recovers the relative camera rotation and the translation from corresponding points in two images from two different cameras, using cheirality check. Returns the number of
  13372. inliers that pass the check.</div>
  13373. <dl>
  13374. <dt><span class="paramLabel">Parameters:</span></dt>
  13375. <dd><code>points1</code> - Array of N 2D points from the first image. The point coordinates should be
  13376. floating-point (single or double precision).</dd>
  13377. <dd><code>points2</code> - Array of the second image points of the same size and format as points1 .</dd>
  13378. <dd><code>cameraMatrix1</code> - Input/output camera matrix for the first camera, the same as in
  13379. REF: calibrateCamera. Furthermore, for the stereo case, additional flags may be used, see below.</dd>
  13380. <dd><code>distCoeffs1</code> - Input/output vector of distortion coefficients, the same as in
  13381. REF: calibrateCamera.</dd>
<dd><code>cameraMatrix2</code> - Input/output camera matrix for the second camera, the same as in
REF: calibrateCamera. Furthermore, for the stereo case, additional flags may be used, see below.</dd>
  13384. <dd><code>distCoeffs2</code> - Input/output vector of distortion coefficients, the same as in
  13385. REF: calibrateCamera.</dd>
  13386. <dd><code>E</code> - The output essential matrix.</dd>
  13387. <dd><code>R</code> - Output rotation matrix. Together with the translation vector, this matrix makes up a tuple
  13388. that performs a change of basis from the first camera's coordinate system to the second camera's
  13389. coordinate system. Note that, in general, t can not be used for this tuple, see the parameter
  13390. described below.</dd>
  13391. <dd><code>t</code> - Output translation vector. This vector is obtained by REF: decomposeEssentialMat and
  13392. therefore is only known up to scale, i.e. t is the direction of the translation vector and has unit
  13393. length.</dd>
  13394. <dd><code>method</code> - Method for computing an essential matrix.
  13395. <ul>
  13396. <li>
  13397. REF: RANSAC for the RANSAC algorithm.
  13398. </li>
  13399. <li>
  13400. REF: LMEDS for the LMedS algorithm.
  13401. </li>
  13402. </ul></dd>
  13403. <dd><code>prob</code> - Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
  13404. confidence (probability) that the estimated matrix is correct.</dd>
  13405. <dd><code>threshold</code> - Parameter used for RANSAC. It is the maximum distance from a point to an epipolar
  13406. line in pixels, beyond which the point is considered an outlier and is not used for computing the
  13407. final fundamental matrix. It can be set to something like 1-3, depending on the accuracy of the
  13408. point localization, image resolution, and the image noise.</dd>
<dd><code>mask</code> - Input/output mask for inliers in points1 and points2. If it is not empty, then it marks
inliers in points1 and points2 for the given essential matrix E. Only these inliers will be used to
recover pose, and in the output mask only inliers which pass the cheirality check are kept.
This function decomposes an essential matrix using REF: decomposeEssentialMat and then verifies
possible pose hypotheses by doing a cheirality check. The cheirality check means that the
triangulated 3D points should have positive depth. Some details can be found in CITE: Nister03.
This function can be used to process the output E and mask from REF: findEssentialMat. In this
scenario, points1 and points2 are the same input as for findEssentialMat (a Java usage sketch also
follows this method entry):
  13417. <code>
// Example. Estimation of the essential matrix and relative pose from two calibrated views
  13419. int point_count = 100;
  13420. vector&lt;Point2f&gt; points1(point_count);
  13421. vector&lt;Point2f&gt; points2(point_count);
  13422. // initialize the points here ...
  13423. for( int i = 0; i &lt; point_count; i++ )
  13424. {
  13425. points1[i] = ...;
  13426. points2[i] = ...;
  13427. }
  13428. // Input: camera calibration of both cameras, for example using intrinsic chessboard calibration.
  13429. Mat cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2;
  13430. // Output: Essential matrix, relative rotation and relative translation.
  13431. Mat E, R, t, mask;
  13432. recoverPose(points1, points2, cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2, E, R, t, mask);
  13433. </code></dd>
  13434. <dt><span class="returnLabel">Returns:</span></dt>
  13435. <dd>automatically generated</dd>
  13436. </dl>
  13437. </li>
  13438. </ul>
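<div class="block">As a rough Java counterpart to the C++ fragment above, the following minimal sketch calls the overload documented in this entry; the point coordinates and camera intrinsics are hypothetical placeholders rather than values taken from this documentation:
<code>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;

public class RecoverPoseExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Matched points from the two images (hypothetical pixel coordinates).
        MatOfPoint2f points1 = new MatOfPoint2f(
                new Point(100, 120), new Point(210, 95), new Point(330, 240),
                new Point(400, 310), new Point(150, 300), new Point(260, 180));
        MatOfPoint2f points2 = new MatOfPoint2f(
                new Point(102, 118), new Point(215, 92), new Point(336, 238),
                new Point(408, 305), new Point(155, 297), new Point(265, 176));

        // Intrinsics of both cameras, e.g. from a prior chessboard calibration (hypothetical values).
        Mat cameraMatrix1 = new Mat(3, 3, CvType.CV_64F);
        cameraMatrix1.put(0, 0, 800.0, 0.0, 320.0, 0.0, 800.0, 240.0, 0.0, 0.0, 1.0);
        Mat distCoeffs1 = Mat.zeros(5, 1, CvType.CV_64F);
        Mat cameraMatrix2 = cameraMatrix1.clone();
        Mat distCoeffs2 = distCoeffs1.clone();

        // Output essential matrix, relative rotation/translation, and inlier mask.
        Mat E = new Mat(), R = new Mat(), t = new Mat(), mask = new Mat();
        int inliers = Calib3d.recoverPose(points1, points2,
                cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2,
                E, R, t, Calib3d.RANSAC, 0.999, 1.0, mask);
        System.out.println("Inliers passing the cheirality check: " + inliers);
    }
}
</code></div>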
  13439. <a name="rectify3Collinear-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-double-org.opencv.core.Size-org.opencv.core.Rect-org.opencv.core.Rect-int-">
  13440. <!-- -->
  13441. </a>
  13442. <ul class="blockList">
  13443. <li class="blockList">
  13444. <h4>rectify3Collinear</h4>
  13445. <pre>public static&nbsp;float&nbsp;rectify3Collinear(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  13446. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  13447. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  13448. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  13449. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix3,
  13450. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs3,
  13451. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imgpt1,
  13452. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imgpt3,
  13453. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  13454. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R12,
  13455. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T12,
  13456. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R13,
  13457. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T13,
  13458. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  13459. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  13460. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R3,
  13461. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  13462. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  13463. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P3,
  13464. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  13465. double&nbsp;alpha,
  13466. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImgSize,
  13467. <a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;roi1,
  13468. <a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;roi2,
  13469. int&nbsp;flags)</pre>
  13470. </li>
  13471. </ul>
  13472. <a name="reprojectImageTo3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  13473. <!-- -->
  13474. </a>
  13475. <ul class="blockList">
  13476. <li class="blockList">
  13477. <h4>reprojectImageTo3D</h4>
  13478. <pre>public static&nbsp;void&nbsp;reprojectImageTo3D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;disparity,
  13479. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;_3dImage,
  13480. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q)</pre>
  13481. <div class="block">Reprojects a disparity image to 3D space.</div>
  13482. <dl>
  13483. <dt><span class="paramLabel">Parameters:</span></dt>
  13484. <dd><code>disparity</code> - Input single-channel 8-bit unsigned, 16-bit signed, 32-bit signed or 32-bit
  13485. floating-point disparity image. The values of 8-bit / 16-bit signed formats are assumed to have no
  13486. fractional bits. If the disparity is 16-bit signed format, as computed by REF: StereoBM or
  13487. REF: StereoSGBM and maybe other algorithms, it should be divided by 16 (and scaled to float) before
  13488. being used here.</dd>
  13489. <dd><code>_3dImage</code> - Output 3-channel floating-point image of the same size as disparity. Each element of
  13490. _3dImage(x,y) contains 3D coordinates of the point (x,y) computed from the disparity map. If one
  13491. uses Q obtained by REF: stereoRectify, then the returned points are represented in the first
  13492. camera's rectified coordinate system.</dd>
<dd><code>Q</code> - \(4 \times 4\) perspective transformation matrix that can be obtained with
REF: stereoRectify.
The overloads below additionally accept handleMissingValues (if true, pixels with the minimal
disparity that corresponds to the outliers, see StereoMatcher::compute, are transformed to 3D
points with a very large Z value, currently set to 10000) and ddepth (the output array depth:
CV_16S, CV_32S or CV_32F, with -1 meaning CV_32F).
  13499. The function transforms a single-channel disparity map to a 3-channel image representing a 3D
  13500. surface. That is, for each pixel (x,y) and the corresponding disparity d=disparity(x,y) , it
  13501. computes:
\(\begin{bmatrix} X \\ Y \\ Z \\ W \end{bmatrix} = Q \begin{bmatrix} x \\ y \\ \texttt{disparity}(x,y) \\ 1 \end{bmatrix}.\)
  13513. SEE:
  13514. To reproject a sparse set of points {(x,y,d),...} to 3D space, use perspectiveTransform.</dd>
  13515. </dl>
  13516. </li>
  13517. </ul>
  13518. <a name="reprojectImageTo3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-boolean-">
  13519. <!-- -->
  13520. </a>
  13521. <ul class="blockList">
  13522. <li class="blockList">
  13523. <h4>reprojectImageTo3D</h4>
  13524. <pre>public static&nbsp;void&nbsp;reprojectImageTo3D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;disparity,
  13525. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;_3dImage,
  13526. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  13527. boolean&nbsp;handleMissingValues)</pre>
  13528. <div class="block">Reprojects a disparity image to 3D space.</div>
  13529. <dl>
  13530. <dt><span class="paramLabel">Parameters:</span></dt>
  13531. <dd><code>disparity</code> - Input single-channel 8-bit unsigned, 16-bit signed, 32-bit signed or 32-bit
  13532. floating-point disparity image. The values of 8-bit / 16-bit signed formats are assumed to have no
  13533. fractional bits. If the disparity is 16-bit signed format, as computed by REF: StereoBM or
  13534. REF: StereoSGBM and maybe other algorithms, it should be divided by 16 (and scaled to float) before
  13535. being used here.</dd>
  13536. <dd><code>_3dImage</code> - Output 3-channel floating-point image of the same size as disparity. Each element of
  13537. _3dImage(x,y) contains 3D coordinates of the point (x,y) computed from the disparity map. If one
  13538. uses Q obtained by REF: stereoRectify, then the returned points are represented in the first
  13539. camera's rectified coordinate system.</dd>
  13540. <dd><code>Q</code> - \(4 \times 4\) perspective transformation matrix that can be obtained with
  13541. REF: stereoRectify.</dd>
<dd><code>handleMissingValues</code> - Indicates whether the function should handle missing values (i.e.
points where the disparity was not computed). If handleMissingValues=true, then pixels with the
minimal disparity that corresponds to the outliers (see StereoMatcher::compute ) are transformed
to 3D points with a very large Z value (currently set to 10000).
The overload below additionally accepts ddepth (the output array depth: CV_16S, CV_32S or CV_32F).
  13547. The function transforms a single-channel disparity map to a 3-channel image representing a 3D
  13548. surface. That is, for each pixel (x,y) and the corresponding disparity d=disparity(x,y) , it
  13549. computes:
\(\begin{bmatrix} X \\ Y \\ Z \\ W \end{bmatrix} = Q \begin{bmatrix} x \\ y \\ \texttt{disparity}(x,y) \\ 1 \end{bmatrix}.\)
  13561. SEE:
  13562. To reproject a sparse set of points {(x,y,d),...} to 3D space, use perspectiveTransform.</dd>
  13563. </dl>
  13564. </li>
  13565. </ul>
  13566. <a name="reprojectImageTo3D-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-boolean-int-">
  13567. <!-- -->
  13568. </a>
  13569. <ul class="blockList">
  13570. <li class="blockList">
  13571. <h4>reprojectImageTo3D</h4>
  13572. <pre>public static&nbsp;void&nbsp;reprojectImageTo3D(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;disparity,
  13573. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;_3dImage,
  13574. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  13575. boolean&nbsp;handleMissingValues,
  13576. int&nbsp;ddepth)</pre>
  13577. <div class="block">Reprojects a disparity image to 3D space.</div>
  13578. <dl>
  13579. <dt><span class="paramLabel">Parameters:</span></dt>
  13580. <dd><code>disparity</code> - Input single-channel 8-bit unsigned, 16-bit signed, 32-bit signed or 32-bit
  13581. floating-point disparity image. The values of 8-bit / 16-bit signed formats are assumed to have no
  13582. fractional bits. If the disparity is 16-bit signed format, as computed by REF: StereoBM or
  13583. REF: StereoSGBM and maybe other algorithms, it should be divided by 16 (and scaled to float) before
  13584. being used here.</dd>
  13585. <dd><code>_3dImage</code> - Output 3-channel floating-point image of the same size as disparity. Each element of
  13586. _3dImage(x,y) contains 3D coordinates of the point (x,y) computed from the disparity map. If one
  13587. uses Q obtained by REF: stereoRectify, then the returned points are represented in the first
  13588. camera's rectified coordinate system.</dd>
  13589. <dd><code>Q</code> - \(4 \times 4\) perspective transformation matrix that can be obtained with
  13590. REF: stereoRectify.</dd>
<dd><code>handleMissingValues</code> - Indicates whether the function should handle missing values (i.e.
  13592. points where the disparity was not computed). If handleMissingValues=true, then pixels with the
  13593. minimal disparity that corresponds to the outliers (see StereoMatcher::compute ) are transformed
  13594. to 3D points with a very large Z value (currently set to 10000).</dd>
  13595. <dd><code>ddepth</code> - The optional output array depth. If it is -1, the output image will have CV_32F
  13596. depth. ddepth can also be set to CV_16S, CV_32S or CV_32F.
  13597. The function transforms a single-channel disparity map to a 3-channel image representing a 3D
  13598. surface. That is, for each pixel (x,y) and the corresponding disparity d=disparity(x,y) , it
  13599. computes:
\(\begin{bmatrix} X \\ Y \\ Z \\ W \end{bmatrix} = Q \begin{bmatrix} x \\ y \\ \texttt{disparity}(x,y) \\ 1 \end{bmatrix}.\)
  13611. SEE:
  13612. To reproject a sparse set of points {(x,y,d),...} to 3D space, use perspectiveTransform.</dd>
  13613. </dl>
  13614. </li>
  13615. </ul>
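<div class="block">The following minimal Java sketch illustrates the scale-by-1/16 note and the ddepth parameter described above; the constant disparity map and the Q matrix are hand-built placeholders (in practice Q comes from REF: stereoRectify):
<code>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;

public class ReprojectExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Hypothetical raw 16-bit signed disparity (as produced by StereoBM/StereoSGBM):
        // a constant disparity of 32 px stored with 4 fractional bits (32 * 16 = 512).
        Mat disparity16 = new Mat(480, 640, CvType.CV_16S, new Scalar(32 * 16));

        // Scale by 1/16 into a floating-point disparity map before reprojecting.
        Mat disparity = new Mat();
        disparity16.convertTo(disparity, CvType.CV_32F, 1.0 / 16.0);

        // Hand-built 4x4 reprojection matrix (placeholder); in practice use Q from stereoRectify.
        // Layout: [1 0 0 -cx; 0 1 0 -cy; 0 0 0 f; 0 0 1/Tx 0] with f=800 px, cx=320, cy=240, Tx=0.06 m.
        Mat Q = Mat.zeros(4, 4, CvType.CV_64F);
        Q.put(0, 0, 1.0); Q.put(0, 3, -320.0);
        Q.put(1, 1, 1.0); Q.put(1, 3, -240.0);
        Q.put(2, 3, 800.0);
        Q.put(3, 2, 1.0 / 0.06);

        // No missing-value handling is needed for this synthetic map, so pass false.
        Mat points3d = new Mat();
        Calib3d.reprojectImageTo3D(disparity, points3d, Q, false, CvType.CV_32F);

        double[] p = points3d.get(240, 320); // 3D coordinates (X, Y, Z) of the central pixel
        System.out.printf("Central pixel at X=%.3f Y=%.3f Z=%.3f%n", p[0], p[1], p[2]);
    }
}
</code></div>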
  13616. <a name="Rodrigues-org.opencv.core.Mat-org.opencv.core.Mat-">
  13617. <!-- -->
  13618. </a>
  13619. <ul class="blockList">
  13620. <li class="blockList">
  13621. <h4>Rodrigues</h4>
  13622. <pre>public static&nbsp;void&nbsp;Rodrigues(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  13623. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst)</pre>
  13624. <div class="block">Converts a rotation matrix to a rotation vector or vice versa.</div>
  13625. <dl>
  13626. <dt><span class="paramLabel">Parameters:</span></dt>
  13627. <dd><code>src</code> - Input rotation vector (3x1 or 1x3) or rotation matrix (3x3).</dd>
<dd><code>dst</code> - Output rotation matrix (3x3) or rotation vector (3x1 or 1x3), respectively.
The overload below additionally returns an optional Jacobian matrix of partial derivatives of the
output array components with respect to the input array components.
\(\begin{array}{l} \theta \leftarrow norm(r) \\ r \leftarrow r/ \theta \\ R = \cos(\theta) I + (1- \cos{\theta} ) r r^T + \sin(\theta) \begin{bmatrix} 0 & -r_z & r_y \\ r_z & 0 & -r_x \\ -r_y & r_x & 0 \end{bmatrix} \end{array}\)
The inverse transformation can also be done easily, since
\(\sin ( \theta ) \begin{bmatrix} 0 & -r_z & r_y \\ r_z & 0 & -r_x \\ -r_y & r_x & 0 \end{bmatrix} = \frac{R - R^T}{2}\)
  13633. A rotation vector is a convenient and most compact representation of a rotation matrix (since any
  13634. rotation matrix has just 3 degrees of freedom). The representation is used in the global 3D geometry
  13635. optimization procedures like REF: calibrateCamera, REF: stereoCalibrate, or REF: solvePnP .
  13636. <b>Note:</b> More information about the computation of the derivative of a 3D rotation matrix with respect to its exponential coordinate
  13637. can be found in:
  13638. <ul>
  13639. <li>
  13640. A Compact Formula for the Derivative of a 3-D Rotation in Exponential Coordinates, Guillermo Gallego, Anthony J. Yezzi CITE: Gallego2014ACF
  13641. </li>
  13642. </ul>
  13643. <b>Note:</b> Useful information on SE(3) and Lie Groups can be found in:
  13644. <ul>
  13645. <li>
  13646. A tutorial on SE(3) transformation parameterizations and on-manifold optimization, Jose-Luis Blanco CITE: blanco2010tutorial
  13647. </li>
  13648. <li>
  13649. Lie Groups for 2D and 3D Transformation, Ethan Eade CITE: Eade17
  13650. </li>
  13651. <li>
  13652. A micro Lie theory for state estimation in robotics, Joan Solà, Jérémie Deray, Dinesh Atchuthan CITE: Sol2018AML
  13653. </li>
  13654. </ul></dd>
  13655. </dl>
  13656. </li>
  13657. </ul>
  13658. <a name="Rodrigues-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  13659. <!-- -->
  13660. </a>
  13661. <ul class="blockList">
  13662. <li class="blockList">
  13663. <h4>Rodrigues</h4>
  13664. <pre>public static&nbsp;void&nbsp;Rodrigues(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  13665. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  13666. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;jacobian)</pre>
  13667. <div class="block">Converts a rotation matrix to a rotation vector or vice versa.</div>
  13668. <dl>
  13669. <dt><span class="paramLabel">Parameters:</span></dt>
  13670. <dd><code>src</code> - Input rotation vector (3x1 or 1x3) or rotation matrix (3x3).</dd>
  13671. <dd><code>dst</code> - Output rotation matrix (3x3) or rotation vector (3x1 or 1x3), respectively.</dd>
  13672. <dd><code>jacobian</code> - Optional output Jacobian matrix, 3x9 or 9x3, which is a matrix of partial
  13673. derivatives of the output array components with respect to the input array components.
\(\begin{array}{l} \theta \leftarrow norm(r) \\ r \leftarrow r/ \theta \\ R = \cos(\theta) I + (1- \cos{\theta} ) r r^T + \sin(\theta) \begin{bmatrix} 0 & -r_z & r_y \\ r_z & 0 & -r_x \\ -r_y & r_x & 0 \end{bmatrix} \end{array}\)
The inverse transformation can also be done easily, since
\(\sin ( \theta ) \begin{bmatrix} 0 & -r_z & r_y \\ r_z & 0 & -r_x \\ -r_y & r_x & 0 \end{bmatrix} = \frac{R - R^T}{2}\)
  13677. A rotation vector is a convenient and most compact representation of a rotation matrix (since any
  13678. rotation matrix has just 3 degrees of freedom). The representation is used in the global 3D geometry
  13679. optimization procedures like REF: calibrateCamera, REF: stereoCalibrate, or REF: solvePnP .
  13680. <b>Note:</b> More information about the computation of the derivative of a 3D rotation matrix with respect to its exponential coordinate
  13681. can be found in:
  13682. <ul>
  13683. <li>
  13684. A Compact Formula for the Derivative of a 3-D Rotation in Exponential Coordinates, Guillermo Gallego, Anthony J. Yezzi CITE: Gallego2014ACF
  13685. </li>
  13686. </ul>
  13687. <b>Note:</b> Useful information on SE(3) and Lie Groups can be found in:
  13688. <ul>
  13689. <li>
  13690. A tutorial on SE(3) transformation parameterizations and on-manifold optimization, Jose-Luis Blanco CITE: blanco2010tutorial
  13691. </li>
  13692. <li>
  13693. Lie Groups for 2D and 3D Transformation, Ethan Eade CITE: Eade17
  13694. </li>
  13695. <li>
  13696. A micro Lie theory for state estimation in robotics, Joan Solà, Jérémie Deray, Dinesh Atchuthan CITE: Sol2018AML
  13697. </li>
  13698. </ul></dd>
  13699. </dl>
  13700. </li>
  13701. </ul>
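<div class="block">A minimal Java round-trip sketch for the conversion described above (rotation vector to rotation matrix and back); the 90-degree rotation about the z-axis is an arbitrary example value:
<code>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;

public class RodriguesExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Rotation of 90 degrees about the z-axis, encoded as a rotation vector
        // (axis * angle): r = (0, 0, pi/2).
        Mat rvec = new Mat(3, 1, CvType.CV_64F);
        rvec.put(0, 0, 0.0, 0.0, Math.PI / 2);

        // Rotation vector -> 3x3 rotation matrix.
        Mat R = new Mat();
        Calib3d.Rodrigues(rvec, R);
        System.out.println("R = " + R.dump());

        // Rotation matrix -> rotation vector (the round trip recovers rvec up to numerical error).
        Mat rvecBack = new Mat();
        Calib3d.Rodrigues(R, rvecBack);
        System.out.println("rvec back = " + rvecBack.dump());
    }
}
</code></div>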
  13702. <a name="RQDecomp3x3-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  13703. <!-- -->
  13704. </a>
  13705. <ul class="blockList">
  13706. <li class="blockList">
  13707. <h4>RQDecomp3x3</h4>
  13708. <pre>public static&nbsp;double[]&nbsp;RQDecomp3x3(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  13709. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxR,
  13710. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxQ)</pre>
  13711. <div class="block">Computes an RQ decomposition of 3x3 matrices.</div>
  13712. <dl>
  13713. <dt><span class="paramLabel">Parameters:</span></dt>
  13714. <dd><code>src</code> - 3x3 input matrix.</dd>
  13715. <dd><code>mtxR</code> - Output 3x3 upper-triangular matrix.</dd>
  13716. <dd><code>mtxQ</code> - Output 3x3 orthogonal matrix.
The function computes an RQ decomposition using the given rotations. This function is used in
#decomposeProjectionMatrix to decompose the left 3x3 submatrix of a projection matrix into a camera
matrix and a rotation matrix.
It optionally returns three rotation matrices, one for each axis, and the three Euler angles in
degrees (as the return value) that could be used in OpenGL. Note that there is always more than one
sequence of rotations about the three principal axes that results in the same orientation of an
object, e.g. see CITE: Slabaugh . The returned three rotation matrices and the corresponding three
Euler angles are only one of the possible solutions.</dd>
  13725. <dt><span class="returnLabel">Returns:</span></dt>
  13726. <dd>automatically generated</dd>
  13727. </dl>
  13728. </li>
  13729. </ul>
  13730. <a name="RQDecomp3x3-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  13731. <!-- -->
  13732. </a>
  13733. <ul class="blockList">
  13734. <li class="blockList">
  13735. <h4>RQDecomp3x3</h4>
  13736. <pre>public static&nbsp;double[]&nbsp;RQDecomp3x3(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  13737. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxR,
  13738. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxQ,
  13739. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Qx)</pre>
  13740. <div class="block">Computes an RQ decomposition of 3x3 matrices.</div>
  13741. <dl>
  13742. <dt><span class="paramLabel">Parameters:</span></dt>
  13743. <dd><code>src</code> - 3x3 input matrix.</dd>
  13744. <dd><code>mtxR</code> - Output 3x3 upper-triangular matrix.</dd>
  13745. <dd><code>mtxQ</code> - Output 3x3 orthogonal matrix.</dd>
  13746. <dd><code>Qx</code> - Optional output 3x3 rotation matrix around x-axis.
The function computes an RQ decomposition using the given rotations. This function is used in
#decomposeProjectionMatrix to decompose the left 3x3 submatrix of a projection matrix into a camera
matrix and a rotation matrix.
It optionally returns three rotation matrices, one for each axis, and the three Euler angles in
degrees (as the return value) that could be used in OpenGL. Note that there is always more than one
sequence of rotations about the three principal axes that results in the same orientation of an
object, e.g. see CITE: Slabaugh . The returned three rotation matrices and the corresponding three
Euler angles are only one of the possible solutions.</dd>
  13755. <dt><span class="returnLabel">Returns:</span></dt>
  13756. <dd>automatically generated</dd>
  13757. </dl>
  13758. </li>
  13759. </ul>
  13760. <a name="RQDecomp3x3-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  13761. <!-- -->
  13762. </a>
  13763. <ul class="blockList">
  13764. <li class="blockList">
  13765. <h4>RQDecomp3x3</h4>
  13766. <pre>public static&nbsp;double[]&nbsp;RQDecomp3x3(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  13767. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxR,
  13768. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxQ,
  13769. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Qx,
  13770. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Qy)</pre>
  13771. <div class="block">Computes an RQ decomposition of 3x3 matrices.</div>
  13772. <dl>
  13773. <dt><span class="paramLabel">Parameters:</span></dt>
  13774. <dd><code>src</code> - 3x3 input matrix.</dd>
  13775. <dd><code>mtxR</code> - Output 3x3 upper-triangular matrix.</dd>
  13776. <dd><code>mtxQ</code> - Output 3x3 orthogonal matrix.</dd>
  13777. <dd><code>Qx</code> - Optional output 3x3 rotation matrix around x-axis.</dd>
  13778. <dd><code>Qy</code> - Optional output 3x3 rotation matrix around y-axis.
The function computes an RQ decomposition using the given rotations. This function is used in
#decomposeProjectionMatrix to decompose the left 3x3 submatrix of a projection matrix into a camera
matrix and a rotation matrix.
It optionally returns three rotation matrices, one for each axis, and the three Euler angles in
degrees (as the return value) that could be used in OpenGL. Note that there is always more than one
sequence of rotations about the three principal axes that results in the same orientation of an
object, e.g. see CITE: Slabaugh . The returned three rotation matrices and the corresponding three
Euler angles are only one of the possible solutions.</dd>
  13787. <dt><span class="returnLabel">Returns:</span></dt>
  13788. <dd>automatically generated</dd>
  13789. </dl>
  13790. </li>
  13791. </ul>
  13792. <a name="RQDecomp3x3-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  13793. <!-- -->
  13794. </a>
  13795. <ul class="blockList">
  13796. <li class="blockList">
  13797. <h4>RQDecomp3x3</h4>
  13798. <pre>public static&nbsp;double[]&nbsp;RQDecomp3x3(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  13799. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxR,
  13800. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;mtxQ,
  13801. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Qx,
  13802. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Qy,
  13803. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Qz)</pre>
  13804. <div class="block">Computes an RQ decomposition of 3x3 matrices.</div>
  13805. <dl>
  13806. <dt><span class="paramLabel">Parameters:</span></dt>
  13807. <dd><code>src</code> - 3x3 input matrix.</dd>
  13808. <dd><code>mtxR</code> - Output 3x3 upper-triangular matrix.</dd>
  13809. <dd><code>mtxQ</code> - Output 3x3 orthogonal matrix.</dd>
  13810. <dd><code>Qx</code> - Optional output 3x3 rotation matrix around x-axis.</dd>
  13811. <dd><code>Qy</code> - Optional output 3x3 rotation matrix around y-axis.</dd>
  13812. <dd><code>Qz</code> - Optional output 3x3 rotation matrix around z-axis.
The function computes an RQ decomposition using the given rotations. This function is used in
#decomposeProjectionMatrix to decompose the left 3x3 submatrix of a projection matrix into a camera
matrix and a rotation matrix.
It optionally returns three rotation matrices, one for each axis, and the three Euler angles in
degrees (as the return value) that could be used in OpenGL. Note that there is always more than one
sequence of rotations about the three principal axes that results in the same orientation of an
object, e.g. see CITE: Slabaugh . The returned three rotation matrices and the corresponding three
Euler angles are only one of the possible solutions.</dd>
  13821. <dt><span class="returnLabel">Returns:</span></dt>
  13822. <dd>automatically generated</dd>
  13823. </dl>
  13824. </li>
  13825. </ul>
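<div class="block">A minimal Java sketch of the decomposition described above, applied to a rotation matrix built with REF: Rodrigues; the 30-degree rotation about the y-axis is an arbitrary example value:
<code>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;

public class RQDecompExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Build a rotation matrix from a rotation vector (30 degrees about the y-axis).
        Mat rvec = new Mat(3, 1, CvType.CV_64F);
        rvec.put(0, 0, 0.0, Math.toRadians(30), 0.0);
        Mat R = new Mat();
        Calib3d.Rodrigues(rvec, R);

        // RQ-decompose it; the return value holds the three Euler angles in degrees.
        Mat mtxR = new Mat(), mtxQ = new Mat();
        double[] euler = Calib3d.RQDecomp3x3(R, mtxR, mtxQ);
        System.out.printf("Euler angles (deg): x=%.1f y=%.1f z=%.1f%n",
                euler[0], euler[1], euler[2]);
    }
}
</code></div>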
  13826. <a name="sampsonDistance-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  13827. <!-- -->
  13828. </a>
  13829. <ul class="blockList">
  13830. <li class="blockList">
  13831. <h4>sampsonDistance</h4>
  13832. <pre>public static&nbsp;double&nbsp;sampsonDistance(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;pt1,
  13833. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;pt2,
  13834. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F)</pre>
  13835. <div class="block">Calculates the Sampson Distance between two points.
  13836. The function cv::sampsonDistance calculates and returns the first order approximation of the geometric error as:
  13837. \(
  13838. sd( \texttt{pt1} , \texttt{pt2} )=
  13839. \frac{(\texttt{pt2}^t \cdot \texttt{F} \cdot \texttt{pt1})^2}
  13840. {((\texttt{F} \cdot \texttt{pt1})(0))^2 +
  13841. ((\texttt{F} \cdot \texttt{pt1})(1))^2 +
  13842. ((\texttt{F}^t \cdot \texttt{pt2})(0))^2 +
  13843. ((\texttt{F}^t \cdot \texttt{pt2})(1))^2}
  13844. \)
  13845. The fundamental matrix may be calculated using the #findFundamentalMat function. See CITE: HartleyZ00 11.4.3 for details.</div>
  13846. <dl>
  13847. <dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>pt1</code> - First homogeneous 2D point.</dd>
<dd><code>pt2</code> - Second homogeneous 2D point.</dd>
<dd><code>F</code> - Fundamental matrix.</dd>
  13851. <dt><span class="returnLabel">Returns:</span></dt>
  13852. <dd>The computed Sampson distance.</dd>
  13853. </dl>
  13854. </li>
  13855. </ul>
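<div class="block">A minimal Java sketch of the formula above; the fundamental matrix used here is the idealized one of a rectified stereo pair and the two points are hypothetical, so for this F the distance depends only on the vertical mismatch between the points:
<code>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;

public class SampsonExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Fundamental matrix of an ideal rectified stereo pair (hypothetical,
        // corresponds to a pure horizontal translation): F = [0 0 0; 0 0 -1; 0 1 0].
        Mat F = new Mat(3, 3, CvType.CV_64F);
        F.put(0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, 1.0, 0.0);

        // Homogeneous 2D points (x, y, 1) in the two images.
        Mat pt1 = new Mat(3, 1, CvType.CV_64F);
        pt1.put(0, 0, 100.0, 200.0, 1.0);
        Mat pt2 = new Mat(3, 1, CvType.CV_64F);
        pt2.put(0, 0, 140.0, 203.0, 1.0);   // 3 px of vertical mismatch

        double sd = Calib3d.sampsonDistance(pt1, pt2, F);
        System.out.println("Sampson distance: " + sd);
    }
}
</code></div>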
  13856. <a name="solveP3P-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-int-">
  13857. <!-- -->
  13858. </a>
  13859. <ul class="blockList">
  13860. <li class="blockList">
  13861. <h4>solveP3P</h4>
  13862. <pre>public static&nbsp;int&nbsp;solveP3P(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  13863. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  13864. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  13865. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  13866. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  13867. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  13868. int&nbsp;flags)</pre>
  13869. <div class="block">Finds an object pose from 3 3D-2D point correspondences.
  13870. SEE: REF: calib3d_solvePnP</div>
  13871. <dl>
  13872. <dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>objectPoints</code> - Array of object points in the object coordinate space, 3x3 1-channel or
1x3/3x1 3-channel. vector&lt;Point3f&gt; can also be passed here.</dd>
<dd><code>imagePoints</code> - Array of corresponding image points, 3x2 1-channel or 1x3/3x1 2-channel.
vector&lt;Point2f&gt; can also be passed here.</dd>
  13877. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  13878. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  13879. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  13880. assumed.</dd>
<dd><code>rvecs</code> - Output rotation vectors (see REF: Rodrigues ) that, together with tvecs, bring points from
the model coordinate system to the camera coordinate system. A P3P problem has up to 4 solutions.</dd>
  13883. <dd><code>tvecs</code> - Output translation vectors.</dd>
  13884. <dd><code>flags</code> - Method for solving a P3P problem:
  13885. <ul>
  13886. <li>
  13887. REF: SOLVEPNP_P3P Method is based on the paper of X.S. Gao, X.-R. Hou, J. Tang, H.-F. Chang
  13888. "Complete Solution Classification for the Perspective-Three-Point Problem" (CITE: gao2003complete).
  13889. </li>
  13890. <li>
  13891. REF: SOLVEPNP_AP3P Method is based on the paper of T. Ke and S. Roumeliotis.
  13892. "An Efficient Algebraic Solution to the Perspective-Three-Point Problem" (CITE: Ke17).
  13893. </li>
  13894. </ul>
  13895. The function estimates the object pose given 3 object points, their corresponding image
  13896. projections, as well as the camera intrinsic matrix and the distortion coefficients.
  13897. <b>Note:</b>
  13898. The solutions are sorted by reprojection errors (lowest to highest).</dd>
  13899. <dt><span class="returnLabel">Returns:</span></dt>
  13900. <dd>automatically generated</dd>
  13901. </dl>
  13902. </li>
  13903. </ul>
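<div class="block">A minimal Java sketch of the call documented above; the object points, camera intrinsics, and ground-truth pose are hypothetical, and the image points are synthesized with projectPoints so that the correspondences are consistent:
<code>
import java.util.ArrayList;
import java.util.List;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;

public class SolveP3PExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Three known object points (hypothetical planar target, coordinates in meters).
        MatOfPoint3f objectPoints = new MatOfPoint3f(
                new Point3(0, 0, 0), new Point3(0.1, 0, 0), new Point3(0, 0.1, 0));

        // Hypothetical pinhole camera with zero distortion.
        Mat cameraMatrix = new Mat(3, 3, CvType.CV_64F);
        cameraMatrix.put(0, 0, 800.0, 0.0, 320.0, 0.0, 800.0, 240.0, 0.0, 0.0, 1.0);
        MatOfDouble distCoeffs = new MatOfDouble(); // empty: zero distortion assumed

        // Synthesize consistent image points by projecting the object with a known pose.
        Mat rvecTrue = Mat.zeros(3, 1, CvType.CV_64F);   // identity rotation
        Mat tvecTrue = new Mat(3, 1, CvType.CV_64F);
        tvecTrue.put(0, 0, 0.0, 0.0, 1.0);               // 1 m in front of the camera
        MatOfPoint2f imagePoints = new MatOfPoint2f();
        Calib3d.projectPoints(objectPoints, rvecTrue, tvecTrue, cameraMatrix, distCoeffs, imagePoints);

        // Recover the candidate poses (up to 4) from the three correspondences.
        List&lt;Mat&gt; rvecs = new ArrayList&lt;&gt;();
        List&lt;Mat&gt; tvecs = new ArrayList&lt;&gt;();
        int solutions = Calib3d.solveP3P(objectPoints, imagePoints, cameraMatrix, distCoeffs,
                rvecs, tvecs, Calib3d.SOLVEPNP_P3P);
        System.out.println("P3P returned " + solutions + " solution(s)");
        for (int i = 0; i &lt; tvecs.size(); i++) {
            System.out.println("tvec[" + i + "] = " + tvecs.get(i).dump());
        }
    }
}
</code></div>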
  13904. <a name="solvePnP-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-">
  13905. <!-- -->
  13906. </a>
  13907. <ul class="blockList">
  13908. <li class="blockList">
  13909. <h4>solvePnP</h4>
  13910. <pre>public static&nbsp;boolean&nbsp;solvePnP(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  13911. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
  13912. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  13913. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  13914. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  13915. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec)</pre>
  13916. <div class="block">Finds an object pose from 3D-2D point correspondences.
  13917. SEE: REF: calib3d_solvePnP
  13918. This function returns the rotation and the translation vectors that transform a 3D point expressed in the object
  13919. coordinate frame to the camera coordinate frame, using different methods:
  13920. <ul>
  13921. <li>
  13922. P3P methods (REF: SOLVEPNP_P3P, REF: SOLVEPNP_AP3P): need 4 input points to return a unique solution.
  13923. </li>
  13924. <li>
  13925. REF: SOLVEPNP_IPPE Input points must be &gt;= 4 and object points must be coplanar.
  13926. </li>
  13927. <li>
  13928. REF: SOLVEPNP_IPPE_SQUARE Special case suitable for marker pose estimation.
  13929. Number of input points must be 4. Object points must be defined in the following order:
  13930. <ul>
  13931. <li>
  13932. point 0: [-squareLength / 2, squareLength / 2, 0]
  13933. </li>
  13934. <li>
  13935. point 1: [ squareLength / 2, squareLength / 2, 0]
  13936. </li>
  13937. <li>
  13938. point 2: [ squareLength / 2, -squareLength / 2, 0]
  13939. </li>
  13940. <li>
  13941. point 3: [-squareLength / 2, -squareLength / 2, 0]
  13942. </li>
  13943. </ul>
  13944. <li>
  13945. for all the other flags, number of input points must be &gt;= 4 and object points can be in any configuration.
  13946. </li>
  13947. </ul></div>
  13948. <dl>
  13949. <dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can also be passed here.</dd>
<dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
where N is the number of points. vector&lt;Point2d&gt; can also be passed here.</dd>
  13954. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  13955. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  13956. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  13957. assumed.</dd>
  13958. <dd><code>rvec</code> - Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  13959. the model coordinate system to the camera coordinate system.</dd>
<dd><code>tvec</code> - Output translation vector.
With #SOLVEPNP_ITERATIVE and useExtrinsicGuess=true (see the overloads below), the function uses
the provided rvec and tvec values as initial approximations of the rotation and translation
vectors, respectively, and further optimizes them.
More information about Perspective-n-Point is described in REF: calib3d_solvePnP
  13964. <b>Note:</b>
  13965. <ul>
  13966. <li>
  13967. An example of how to use solvePnP for planar augmented reality can be found at
  13968. opencv_source_code/samples/python/plane_ar.py
  13969. </li>
  13970. <li>
  13971. If you are using Python:
  13972. <ul>
  13973. <li>
  13974. Numpy array slices won't work as input because solvePnP requires contiguous
  13975. arrays (enforced by the assertion using cv::Mat::checkVector() around line 55 of
  13976. modules/calib3d/src/solvepnp.cpp version 2.4.9)
  13977. </li>
  13978. <li>
  13979. The P3P algorithm requires image points to be in an array of shape (N,1,2) due
  13980. to its calling of #undistortPoints (around line 75 of modules/calib3d/src/solvepnp.cpp version 2.4.9)
  13981. which requires 2-channel information.
  13982. </li>
  13983. <li>
  13984. Thus, given some data D = np.array(...) where D.shape = (N,M), in order to use a subset of
  13985. it as, e.g., imagePoints, one must effectively copy it into a new array: imagePoints =
  13986. np.ascontiguousarray(D[:,:2]).reshape((N,1,2))
  13987. </li>
  13988. </ul>
  13989. <li>
  13990. The methods REF: SOLVEPNP_DLS and REF: SOLVEPNP_UPNP cannot be used as the current implementations are
  13991. unstable and sometimes give completely wrong results. If you pass one of these two
  13992. flags, REF: SOLVEPNP_EPNP method will be used instead.
  13993. </li>
  13994. <li>
  13995. The minimum number of points is 4 in the general case. In the case of REF: SOLVEPNP_P3P and REF: SOLVEPNP_AP3P
  13996. methods, it is required to use exactly 4 points (the first 3 points are used to estimate all the solutions
  13997. of the P3P problem, the last one is used to retain the best solution that minimizes the reprojection error).
  13998. </li>
  13999. <li>
  14000. With REF: SOLVEPNP_ITERATIVE method and <code>useExtrinsicGuess=true</code>, the minimum number of points is 3 (3 points
  14001. are sufficient to compute a pose but there are up to 4 solutions). The initial solution should be close to the
  14002. global solution to converge.
  14003. </li>
  14004. <li>
  14005. With REF: SOLVEPNP_IPPE input points must be &gt;= 4 and object points must be coplanar.
  14006. </li>
  14007. <li>
  14008. With REF: SOLVEPNP_IPPE_SQUARE this is a special case suitable for marker pose estimation.
  14009. Number of input points must be 4. Object points must be defined in the following order:
  14010. <ul>
  14011. <li>
  14012. point 0: [-squareLength / 2, squareLength / 2, 0]
  14013. </li>
  14014. <li>
  14015. point 1: [ squareLength / 2, squareLength / 2, 0]
  14016. </li>
  14017. <li>
  14018. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14019. </li>
  14020. <li>
  14021. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14022. </li>
  14023. </ul>
  14024. <ul>
  14025. <li>
  14026. With REF: SOLVEPNP_SQPNP input points must be &gt;= 3
  14027. </li>
  14028. </ul>
  14029. </li>
  14030. </ul></dd>
  14031. <dt><span class="returnLabel">Returns:</span></dt>
  14032. <dd>automatically generated</dd>
  14033. </dl>
  14034. </li>
  14035. </ul>
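<div class="block">A minimal Java sketch of the call documented above for a square planar target; the corner coordinates and camera intrinsics are hypothetical placeholders, not values from this documentation:
<code>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;

public class SolvePnPExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Four corners of a 10 cm square marker in its own coordinate frame (z = 0 plane).
        MatOfPoint3f objectPoints = new MatOfPoint3f(
                new Point3(-0.05,  0.05, 0), new Point3( 0.05,  0.05, 0),
                new Point3( 0.05, -0.05, 0), new Point3(-0.05, -0.05, 0));

        // Where those corners were detected in the image (hypothetical pixel coordinates).
        MatOfPoint2f imagePoints = new MatOfPoint2f(
                new Point(290, 210), new Point(350, 212),
                new Point(352, 270), new Point(288, 268));

        // Hypothetical intrinsics; in practice use the values from calibrateCamera.
        Mat cameraMatrix = new Mat(3, 3, CvType.CV_64F);
        cameraMatrix.put(0, 0, 800.0, 0.0, 320.0, 0.0, 800.0, 240.0, 0.0, 0.0, 1.0);
        MatOfDouble distCoeffs = new MatOfDouble(); // empty: zero distortion assumed

        // Pose of the marker in the camera frame.
        Mat rvec = new Mat(), tvec = new Mat();
        boolean ok = Calib3d.solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);
        System.out.println("solvePnP succeeded: " + ok);
        System.out.println("rvec = " + rvec.dump());
        System.out.println("tvec = " + tvec.dump());
    }
}
</code></div>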
  14036. <a name="solvePnP-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-">
  14037. <!-- -->
  14038. </a>
  14039. <ul class="blockList">
  14040. <li class="blockList">
  14041. <h4>solvePnP</h4>
  14042. <pre>public static&nbsp;boolean&nbsp;solvePnP(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  14043. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
  14044. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  14045. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  14046. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  14047. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  14048. boolean&nbsp;useExtrinsicGuess)</pre>
  14049. <div class="block">Finds an object pose from 3D-2D point correspondences.
  14050. SEE: REF: calib3d_solvePnP
  14051. This function returns the rotation and the translation vectors that transform a 3D point expressed in the object
  14052. coordinate frame to the camera coordinate frame, using different methods:
  14053. <ul>
  14054. <li>
  14055. P3P methods (REF: SOLVEPNP_P3P, REF: SOLVEPNP_AP3P): need 4 input points to return a unique solution.
  14056. </li>
  14057. <li>
  14058. REF: SOLVEPNP_IPPE Input points must be &gt;= 4 and object points must be coplanar.
  14059. </li>
  14060. <li>
  14061. REF: SOLVEPNP_IPPE_SQUARE Special case suitable for marker pose estimation.
  14062. Number of input points must be 4. Object points must be defined in the following order:
  14063. <ul>
  14064. <li>
  14065. point 0: [-squareLength / 2, squareLength / 2, 0]
  14066. </li>
  14067. <li>
  14068. point 1: [ squareLength / 2, squareLength / 2, 0]
  14069. </li>
  14070. <li>
  14071. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14072. </li>
  14073. <li>
  14074. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14075. </li>
  14076. </ul>
  14077. <li>
  14078. for all the other flags, number of input points must be &gt;= 4 and object points can be in any configuration.
  14079. </li>
  14080. </ul></div>
  14081. <dl>
  14082. <dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can also be passed here.</dd>
<dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
where N is the number of points. vector&lt;Point2d&gt; can also be passed here.</dd>
  14087. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  14088. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  14089. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  14090. assumed.</dd>
  14091. <dd><code>rvec</code> - Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  14092. the model coordinate system to the camera coordinate system.</dd>
  14093. <dd><code>tvec</code> - Output translation vector.</dd>
  14094. <dd><code>useExtrinsicGuess</code> - Parameter used for #SOLVEPNP_ITERATIVE. If true (1), the function uses
  14095. the provided rvec and tvec values as initial approximations of the rotation and translation
  14096. vectors, respectively, and further optimizes them.
More information about Perspective-n-Point is described in REF: calib3d_solvePnP
  14098. <b>Note:</b>
  14099. <ul>
  14100. <li>
  14101. An example of how to use solvePnP for planar augmented reality can be found at
  14102. opencv_source_code/samples/python/plane_ar.py
  14103. </li>
  14104. <li>
  14105. If you are using Python:
  14106. <ul>
  14107. <li>
  14108. Numpy array slices won't work as input because solvePnP requires contiguous
  14109. arrays (enforced by the assertion using cv::Mat::checkVector() around line 55 of
  14110. modules/calib3d/src/solvepnp.cpp version 2.4.9)
  14111. </li>
  14112. <li>
  14113. The P3P algorithm requires image points to be in an array of shape (N,1,2) due
  14114. to its calling of #undistortPoints (around line 75 of modules/calib3d/src/solvepnp.cpp version 2.4.9)
  14115. which requires 2-channel information.
  14116. </li>
  14117. <li>
  14118. Thus, given some data D = np.array(...) where D.shape = (N,M), in order to use a subset of
  14119. it as, e.g., imagePoints, one must effectively copy it into a new array: imagePoints =
  14120. np.ascontiguousarray(D[:,:2]).reshape((N,1,2))
  14121. </li>
  14122. </ul>
  14123. <li>
  14124. The methods REF: SOLVEPNP_DLS and REF: SOLVEPNP_UPNP cannot be used as the current implementations are
  14125. unstable and sometimes give completely wrong results. If you pass one of these two
  14126. flags, REF: SOLVEPNP_EPNP method will be used instead.
  14127. </li>
  14128. <li>
  14129. The minimum number of points is 4 in the general case. In the case of REF: SOLVEPNP_P3P and REF: SOLVEPNP_AP3P
  14130. methods, it is required to use exactly 4 points (the first 3 points are used to estimate all the solutions
  14131. of the P3P problem, the last one is used to retain the best solution that minimizes the reprojection error).
  14132. </li>
  14133. <li>
  14134. With REF: SOLVEPNP_ITERATIVE method and <code>useExtrinsicGuess=true</code>, the minimum number of points is 3 (3 points
  14135. are sufficient to compute a pose but there are up to 4 solutions). The initial solution should be close to the
  14136. global solution to converge.
  14137. </li>
  14138. <li>
  14139. With REF: SOLVEPNP_IPPE input points must be &gt;= 4 and object points must be coplanar.
  14140. </li>
  14141. <li>
  14142. With REF: SOLVEPNP_IPPE_SQUARE this is a special case suitable for marker pose estimation.
  14143. Number of input points must be 4. Object points must be defined in the following order:
  14144. <ul>
  14145. <li>
  14146. point 0: [-squareLength / 2, squareLength / 2, 0]
  14147. </li>
  14148. <li>
  14149. point 1: [ squareLength / 2, squareLength / 2, 0]
  14150. </li>
  14151. <li>
  14152. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14153. </li>
  14154. <li>
  14155. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14156. </li>
  14157. </ul>
</li>
<li>
With REF: SOLVEPNP_SQPNP input points must be &gt;= 3
</li>
  14164. </ul></dd>
  14165. <dt><span class="returnLabel">Returns:</span></dt>
  14166. <dd>automatically generated</dd>
  14167. </dl>
  14168. </li>
  14169. </ul>
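<div class="block"><p>The object-point ordering required by REF: SOLVEPNP_IPPE_SQUARE (listed in the note above) can be
built directly as a <code>MatOfPoint3f</code>. The sketch below is illustrative only and not part of the generated API
text; <code>squareLength</code> is a hypothetical marker side length in the caller's units.</p>
<pre><code>import org.opencv.core.MatOfPoint3f;
import org.opencv.core.Point3;

public class SquareMarkerModel {
    /** 3D model points of a square marker in the order expected by SOLVEPNP_IPPE_SQUARE. */
    public static MatOfPoint3f squareObjectPoints(double squareLength) {
        double h = squareLength / 2.0;
        return new MatOfPoint3f(
                new Point3(-h,  h, 0),  // point 0
                new Point3( h,  h, 0),  // point 1
                new Point3( h, -h, 0),  // point 2
                new Point3(-h, -h, 0)); // point 3
    }
}
</code></pre>
</div>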
  14170. <a name="solvePnP-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-int-">
  14171. <!-- -->
  14172. </a>
  14173. <ul class="blockList">
  14174. <li class="blockList">
  14175. <h4>solvePnP</h4>
  14176. <pre>public static&nbsp;boolean&nbsp;solvePnP(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  14177. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
  14178. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  14179. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  14180. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  14181. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  14182. boolean&nbsp;useExtrinsicGuess,
  14183. int&nbsp;flags)</pre>
  14184. <div class="block">Finds an object pose from 3D-2D point correspondences.
  14185. SEE: REF: calib3d_solvePnP
  14186. This function returns the rotation and the translation vectors that transform a 3D point expressed in the object
  14187. coordinate frame to the camera coordinate frame, using different methods:
  14188. <ul>
  14189. <li>
  14190. P3P methods (REF: SOLVEPNP_P3P, REF: SOLVEPNP_AP3P): need 4 input points to return a unique solution.
  14191. </li>
  14192. <li>
  14193. REF: SOLVEPNP_IPPE Input points must be &gt;= 4 and object points must be coplanar.
  14194. </li>
  14195. <li>
  14196. REF: SOLVEPNP_IPPE_SQUARE Special case suitable for marker pose estimation.
  14197. Number of input points must be 4. Object points must be defined in the following order:
  14198. <ul>
  14199. <li>
  14200. point 0: [-squareLength / 2, squareLength / 2, 0]
  14201. </li>
  14202. <li>
  14203. point 1: [ squareLength / 2, squareLength / 2, 0]
  14204. </li>
  14205. <li>
  14206. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14207. </li>
  14208. <li>
  14209. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14210. </li>
  14211. </ul>
  14212. <li>
  14213. for all the other flags, number of input points must be &gt;= 4 and object points can be in any configuration.
  14214. </li>
  14215. </ul></div>
  14216. <dl>
  14217. <dt><span class="paramLabel">Parameters:</span></dt>
  14218. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
  14219. 1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can be also passed here.</dd>
  14220. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  14221. where N is the number of points. vector&lt;Point2d&gt; can be also passed here.</dd>
  14222. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  14223. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  14224. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  14225. assumed.</dd>
  14226. <dd><code>rvec</code> - Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  14227. the model coordinate system to the camera coordinate system.</dd>
  14228. <dd><code>tvec</code> - Output translation vector.</dd>
  14229. <dd><code>useExtrinsicGuess</code> - Parameter used for #SOLVEPNP_ITERATIVE. If true (1), the function uses
  14230. the provided rvec and tvec values as initial approximations of the rotation and translation
  14231. vectors, respectively, and further optimizes them.</dd>
  14232. <dd><code>flags</code> - Method for solving a PnP problem: see REF: calib3d_solvePnP_flags
  14233. More information about Perspective-n-Points is described in REF: calib3d_solvePnP
  14234. <b>Note:</b>
  14235. <ul>
  14236. <li>
  14237. An example of how to use solvePnP for planar augmented reality can be found at
  14238. opencv_source_code/samples/python/plane_ar.py
  14239. </li>
  14240. <li>
  14241. If you are using Python:
  14242. <ul>
  14243. <li>
  14244. Numpy array slices won't work as input because solvePnP requires contiguous
  14245. arrays (enforced by the assertion using cv::Mat::checkVector() around line 55 of
  14246. modules/calib3d/src/solvepnp.cpp version 2.4.9)
  14247. </li>
  14248. <li>
  14249. The P3P algorithm requires image points to be in an array of shape (N,1,2) due
  14250. to its calling of #undistortPoints (around line 75 of modules/calib3d/src/solvepnp.cpp version 2.4.9)
  14251. which requires 2-channel information.
  14252. </li>
  14253. <li>
  14254. Thus, given some data D = np.array(...) where D.shape = (N,M), in order to use a subset of
  14255. it as, e.g., imagePoints, one must effectively copy it into a new array: imagePoints =
  14256. np.ascontiguousarray(D[:,:2]).reshape((N,1,2))
  14257. </li>
  14258. </ul>
  14259. <li>
  14260. The methods REF: SOLVEPNP_DLS and REF: SOLVEPNP_UPNP cannot be used as the current implementations are
  14261. unstable and sometimes give completely wrong results. If you pass one of these two
  14262. flags, REF: SOLVEPNP_EPNP method will be used instead.
  14263. </li>
  14264. <li>
  14265. The minimum number of points is 4 in the general case. In the case of REF: SOLVEPNP_P3P and REF: SOLVEPNP_AP3P
  14266. methods, it is required to use exactly 4 points (the first 3 points are used to estimate all the solutions
  14267. of the P3P problem, the last one is used to retain the best solution that minimizes the reprojection error).
  14268. </li>
  14269. <li>
  14270. With REF: SOLVEPNP_ITERATIVE method and <code>useExtrinsicGuess=true</code>, the minimum number of points is 3 (3 points
  14271. are sufficient to compute a pose but there are up to 4 solutions). The initial solution should be close to the
  14272. global solution to converge.
  14273. </li>
  14274. <li>
  14275. With REF: SOLVEPNP_IPPE input points must be &gt;= 4 and object points must be coplanar.
  14276. </li>
  14277. <li>
  14278. With REF: SOLVEPNP_IPPE_SQUARE this is a special case suitable for marker pose estimation.
  14279. Number of input points must be 4. Object points must be defined in the following order:
  14280. <ul>
  14281. <li>
  14282. point 0: [-squareLength / 2, squareLength / 2, 0]
  14283. </li>
  14284. <li>
  14285. point 1: [ squareLength / 2, squareLength / 2, 0]
  14286. </li>
  14287. <li>
  14288. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14289. </li>
  14290. <li>
  14291. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14292. </li>
  14293. </ul>
</li>
<li>
With REF: SOLVEPNP_SQPNP input points must be &gt;= 3
</li>
  14300. </ul></dd>
  14301. <dt><span class="returnLabel">Returns:</span></dt>
  14302. <dd>automatically generated</dd>
  14303. </dl>
  14304. </li>
  14305. </ul>
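<div class="block"><p>A minimal end-to-end sketch of this overload (illustrative only, not part of the generated API
text). The intrinsics and the 3D-2D correspondences below are placeholder values, and the OpenCV native library is
assumed to be loaded already.</p>
<pre><code>import org.opencv.calib3d.Calib3d;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDouble;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.MatOfPoint3f;
import org.opencv.core.Point;
import org.opencv.core.Point3;

public class SolvePnPExample {
    public static void main(String[] args) {
        // Placeholder 3D model points (object frame) and their detected 2D projections (pixels).
        MatOfPoint3f objectPoints = new MatOfPoint3f(
                new Point3(0, 0, 0), new Point3(1, 0, 0),
                new Point3(1, 1, 0), new Point3(0, 1, 0),
                new Point3(0.5, 0.5, 1));
        MatOfPoint2f imagePoints = new MatOfPoint2f(
                new Point(320, 240), new Point(420, 245),
                new Point(415, 340), new Point(318, 335),
                new Point(370, 290));

        // Placeholder pinhole intrinsics (fx, fy, cx, cy); an empty distCoeffs means zero distortion.
        Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64F);
        cameraMatrix.put(0, 0, 800, 0, 320, 0, 800, 240, 0, 0, 1);
        MatOfDouble distCoeffs = new MatOfDouble();

        Mat rvec = new Mat();
        Mat tvec = new Mat();
        boolean ok = Calib3d.solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs,
                rvec, tvec, false, Calib3d.SOLVEPNP_ITERATIVE);

        // Convert the Rodrigues rotation vector to a 3x3 rotation matrix when a matrix is needed.
        Mat R = new Mat();
        Calib3d.Rodrigues(rvec, R);
        System.out.println("solved=" + ok + " rvec=" + rvec.dump() + " tvec=" + tvec.dump());
    }
}
</code></pre>
</div>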
  14306. <a name="solvePnPGeneric-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-">
  14307. <!-- -->
  14308. </a>
  14309. <ul class="blockList">
  14310. <li class="blockList">
  14311. <h4>solvePnPGeneric</h4>
  14312. <pre>public static&nbsp;int&nbsp;solvePnPGeneric(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  14313. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  14314. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  14315. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  14316. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  14317. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs)</pre>
  14318. <div class="block">Finds an object pose from 3D-2D point correspondences.
  14319. SEE: REF: calib3d_solvePnP
  14320. This function returns a list of all the possible solutions (a solution is a &lt;rotation vector, translation vector&gt;
  14321. couple), depending on the number of input points and the chosen method:
  14322. <ul>
  14323. <li>
  14324. P3P methods (REF: SOLVEPNP_P3P, REF: SOLVEPNP_AP3P): 3 or 4 input points. Number of returned solutions can be between 0 and 4 with 3 input points.
  14325. </li>
  14326. <li>
  14327. REF: SOLVEPNP_IPPE Input points must be &gt;= 4 and object points must be coplanar. Returns 2 solutions.
  14328. </li>
  14329. <li>
  14330. REF: SOLVEPNP_IPPE_SQUARE Special case suitable for marker pose estimation.
  14331. Number of input points must be 4 and 2 solutions are returned. Object points must be defined in the following order:
  14332. <ul>
  14333. <li>
  14334. point 0: [-squareLength / 2, squareLength / 2, 0]
  14335. </li>
  14336. <li>
  14337. point 1: [ squareLength / 2, squareLength / 2, 0]
  14338. </li>
  14339. <li>
  14340. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14341. </li>
  14342. <li>
  14343. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14344. </li>
  14345. </ul>
  14346. <li>
  14347. for all the other flags, number of input points must be &gt;= 4 and object points can be in any configuration.
  14348. Only 1 solution is returned.
  14349. </li>
  14350. </ul></div>
  14351. <dl>
  14352. <dt><span class="paramLabel">Parameters:</span></dt>
  14353. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
  14354. 1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can be also passed here.</dd>
  14355. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  14356. where N is the number of points. vector&lt;Point2d&gt; can be also passed here.</dd>
  14357. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  14358. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  14359. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  14360. assumed.</dd>
  14361. <dd><code>rvecs</code> - Vector of output rotation vectors (see REF: Rodrigues ) that, together with tvecs, brings points from
  14362. the model coordinate system to the camera coordinate system.</dd>
  14363. <dd><code>tvecs</code> - Vector of output translation vectors.
  14370. More information is described in REF: calib3d_solvePnP
  14371. <b>Note:</b>
  14372. <ul>
  14373. <li>
  14374. An example of how to use solvePnP for planar augmented reality can be found at
  14375. opencv_source_code/samples/python/plane_ar.py
  14376. </li>
  14377. <li>
  14378. If you are using Python:
  14379. <ul>
  14380. <li>
  14381. Numpy array slices won't work as input because solvePnP requires contiguous
  14382. arrays (enforced by the assertion using cv::Mat::checkVector() around line 55 of
  14383. modules/calib3d/src/solvepnp.cpp version 2.4.9)
  14384. </li>
  14385. <li>
  14386. The P3P algorithm requires image points to be in an array of shape (N,1,2) due
  14387. to its calling of #undistortPoints (around line 75 of modules/calib3d/src/solvepnp.cpp version 2.4.9)
  14388. which requires 2-channel information.
  14389. </li>
  14390. <li>
  14391. Thus, given some data D = np.array(...) where D.shape = (N,M), in order to use a subset of
  14392. it as, e.g., imagePoints, one must effectively copy it into a new array: imagePoints =
  14393. np.ascontiguousarray(D[:,:2]).reshape((N,1,2))
  14394. </li>
  14395. </ul>
  14396. <li>
  14397. The methods REF: SOLVEPNP_DLS and REF: SOLVEPNP_UPNP cannot be used as the current implementations are
  14398. unstable and sometimes give completely wrong results. If you pass one of these two
  14399. flags, REF: SOLVEPNP_EPNP method will be used instead.
  14400. </li>
  14401. <li>
  14402. The minimum number of points is 4 in the general case. In the case of REF: SOLVEPNP_P3P and REF: SOLVEPNP_AP3P
  14403. methods, it is required to use exactly 4 points (the first 3 points are used to estimate all the solutions
  14404. of the P3P problem, the last one is used to retain the best solution that minimizes the reprojection error).
  14405. </li>
  14406. <li>
  14407. With REF: SOLVEPNP_ITERATIVE method and <code>useExtrinsicGuess=true</code>, the minimum number of points is 3 (3 points
  14408. are sufficient to compute a pose but there are up to 4 solutions). The initial solution should be close to the
  14409. global solution to converge.
  14410. </li>
  14411. <li>
  14412. With REF: SOLVEPNP_IPPE input points must be &gt;= 4 and object points must be coplanar.
  14413. </li>
  14414. <li>
  14415. With REF: SOLVEPNP_IPPE_SQUARE this is a special case suitable for marker pose estimation.
  14416. Number of input points must be 4. Object points must be defined in the following order:
  14417. <ul>
  14418. <li>
  14419. point 0: [-squareLength / 2, squareLength / 2, 0]
  14420. </li>
  14421. <li>
  14422. point 1: [ squareLength / 2, squareLength / 2, 0]
  14423. </li>
  14424. <li>
  14425. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14426. </li>
  14427. <li>
  14428. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14429. </li>
  14430. </ul>
  14431. </li>
  14432. </ul></dd>
  14433. <dt><span class="returnLabel">Returns:</span></dt>
  14434. <dd>automatically generated</dd>
  14435. </dl>
  14436. </li>
  14437. </ul>
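<div class="block"><p>A brief sketch (illustrative only, not part of the generated API text) of collecting every
candidate pose returned by this overload. The input Mats are assumed to be prepared as in the <code>solvePnP</code>
example above.</p>
<pre><code>import java.util.List;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;

public class SolvePnPGenericExample {
    /** Fills rvecs/tvecs with all candidate poses and returns how many were found. */
    public static int allPoses(Mat objectPoints, Mat imagePoints,
                               Mat cameraMatrix, Mat distCoeffs,
                               List&lt;Mat&gt; rvecs, List&lt;Mat&gt; tvecs) {
        rvecs.clear();
        tvecs.clear();
        int n = Calib3d.solvePnPGeneric(objectPoints, imagePoints, cameraMatrix, distCoeffs,
                rvecs, tvecs);
        for (int i = 0; i &lt; n; i++) {
            System.out.println("solution " + i + ": rvec=" + rvecs.get(i).dump()
                    + " tvec=" + tvecs.get(i).dump());
        }
        return n;
    }
}
</code></pre>
</div>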
  14438. <a name="solvePnPGeneric-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-boolean-">
  14439. <!-- -->
  14440. </a>
  14441. <ul class="blockList">
  14442. <li class="blockList">
  14443. <h4>solvePnPGeneric</h4>
  14444. <pre>public static&nbsp;int&nbsp;solvePnPGeneric(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  14445. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  14446. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  14447. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  14448. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  14449. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  14450. boolean&nbsp;useExtrinsicGuess)</pre>
  14451. <div class="block">Finds an object pose from 3D-2D point correspondences.
  14452. SEE: REF: calib3d_solvePnP
  14453. This function returns a list of all the possible solutions (a solution is a &lt;rotation vector, translation vector&gt;
  14454. couple), depending on the number of input points and the chosen method:
  14455. <ul>
  14456. <li>
  14457. P3P methods (REF: SOLVEPNP_P3P, REF: SOLVEPNP_AP3P): 3 or 4 input points. Number of returned solutions can be between 0 and 4 with 3 input points.
  14458. </li>
  14459. <li>
  14460. REF: SOLVEPNP_IPPE Input points must be &gt;= 4 and object points must be coplanar. Returns 2 solutions.
  14461. </li>
  14462. <li>
  14463. REF: SOLVEPNP_IPPE_SQUARE Special case suitable for marker pose estimation.
  14464. Number of input points must be 4 and 2 solutions are returned. Object points must be defined in the following order:
  14465. <ul>
  14466. <li>
  14467. point 0: [-squareLength / 2, squareLength / 2, 0]
  14468. </li>
  14469. <li>
  14470. point 1: [ squareLength / 2, squareLength / 2, 0]
  14471. </li>
  14472. <li>
  14473. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14474. </li>
  14475. <li>
  14476. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14477. </li>
  14478. </ul>
  14479. <li>
  14480. for all the other flags, number of input points must be &gt;= 4 and object points can be in any configuration.
  14481. Only 1 solution is returned.
  14482. </li>
  14483. </ul></div>
  14484. <dl>
  14485. <dt><span class="paramLabel">Parameters:</span></dt>
  14486. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
  14487. 1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can be also passed here.</dd>
  14488. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  14489. where N is the number of points. vector&lt;Point2d&gt; can be also passed here.</dd>
  14490. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  14491. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  14492. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  14493. assumed.</dd>
  14494. <dd><code>rvecs</code> - Vector of output rotation vectors (see REF: Rodrigues ) that, together with tvecs, brings points from
  14495. the model coordinate system to the camera coordinate system.</dd>
  14496. <dd><code>tvecs</code> - Vector of output translation vectors.</dd>
  14497. <dd><code>useExtrinsicGuess</code> - Parameter used for #SOLVEPNP_ITERATIVE. If true (1), the function uses
  14498. the provided rvec and tvec values as initial approximations of the rotation and translation
  14499. vectors, respectively, and further optimizes them.
  14504. More information is described in REF: calib3d_solvePnP
  14505. <b>Note:</b>
  14506. <ul>
  14507. <li>
  14508. An example of how to use solvePnP for planar augmented reality can be found at
  14509. opencv_source_code/samples/python/plane_ar.py
  14510. </li>
  14511. <li>
  14512. If you are using Python:
  14513. <ul>
  14514. <li>
  14515. Numpy array slices won't work as input because solvePnP requires contiguous
  14516. arrays (enforced by the assertion using cv::Mat::checkVector() around line 55 of
  14517. modules/calib3d/src/solvepnp.cpp version 2.4.9)
  14518. </li>
  14519. <li>
  14520. The P3P algorithm requires image points to be in an array of shape (N,1,2) due
  14521. to its calling of #undistortPoints (around line 75 of modules/calib3d/src/solvepnp.cpp version 2.4.9)
  14522. which requires 2-channel information.
  14523. </li>
  14524. <li>
  14525. Thus, given some data D = np.array(...) where D.shape = (N,M), in order to use a subset of
  14526. it as, e.g., imagePoints, one must effectively copy it into a new array: imagePoints =
  14527. np.ascontiguousarray(D[:,:2]).reshape((N,1,2))
  14528. </li>
  14529. </ul>
  14530. <li>
  14531. The methods REF: SOLVEPNP_DLS and REF: SOLVEPNP_UPNP cannot be used as the current implementations are
  14532. unstable and sometimes give completely wrong results. If you pass one of these two
  14533. flags, REF: SOLVEPNP_EPNP method will be used instead.
  14534. </li>
  14535. <li>
  14536. The minimum number of points is 4 in the general case. In the case of REF: SOLVEPNP_P3P and REF: SOLVEPNP_AP3P
  14537. methods, it is required to use exactly 4 points (the first 3 points are used to estimate all the solutions
  14538. of the P3P problem, the last one is used to retain the best solution that minimizes the reprojection error).
  14539. </li>
  14540. <li>
  14541. With REF: SOLVEPNP_ITERATIVE method and <code>useExtrinsicGuess=true</code>, the minimum number of points is 3 (3 points
  14542. are sufficient to compute a pose but there are up to 4 solutions). The initial solution should be close to the
  14543. global solution to converge.
  14544. </li>
  14545. <li>
  14546. With REF: SOLVEPNP_IPPE input points must be &gt;= 4 and object points must be coplanar.
  14547. </li>
  14548. <li>
  14549. With REF: SOLVEPNP_IPPE_SQUARE this is a special case suitable for marker pose estimation.
  14550. Number of input points must be 4. Object points must be defined in the following order:
  14551. <ul>
  14552. <li>
  14553. point 0: [-squareLength / 2, squareLength / 2, 0]
  14554. </li>
  14555. <li>
  14556. point 1: [ squareLength / 2, squareLength / 2, 0]
  14557. </li>
  14558. <li>
  14559. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14560. </li>
  14561. <li>
  14562. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14563. </li>
  14564. </ul>
  14565. </li>
  14566. </ul></dd>
  14567. <dt><span class="returnLabel">Returns:</span></dt>
  14568. <dd>automatically generated</dd>
  14569. </dl>
  14570. </li>
  14571. </ul>
  14572. <a name="solvePnPGeneric-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-boolean-int-">
  14573. <!-- -->
  14574. </a>
  14575. <ul class="blockList">
  14576. <li class="blockList">
  14577. <h4>solvePnPGeneric</h4>
  14578. <pre>public static&nbsp;int&nbsp;solvePnPGeneric(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  14579. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  14580. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  14581. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  14582. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  14583. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  14584. boolean&nbsp;useExtrinsicGuess,
  14585. int&nbsp;flags)</pre>
  14586. <div class="block">Finds an object pose from 3D-2D point correspondences.
  14587. SEE: REF: calib3d_solvePnP
  14588. This function returns a list of all the possible solutions (a solution is a &lt;rotation vector, translation vector&gt;
  14589. couple), depending on the number of input points and the chosen method:
  14590. <ul>
  14591. <li>
  14592. P3P methods (REF: SOLVEPNP_P3P, REF: SOLVEPNP_AP3P): 3 or 4 input points. Number of returned solutions can be between 0 and 4 with 3 input points.
  14593. </li>
  14594. <li>
  14595. REF: SOLVEPNP_IPPE Input points must be &gt;= 4 and object points must be coplanar. Returns 2 solutions.
  14596. </li>
  14597. <li>
  14598. REF: SOLVEPNP_IPPE_SQUARE Special case suitable for marker pose estimation.
  14599. Number of input points must be 4 and 2 solutions are returned. Object points must be defined in the following order:
  14600. <ul>
  14601. <li>
  14602. point 0: [-squareLength / 2, squareLength / 2, 0]
  14603. </li>
  14604. <li>
  14605. point 1: [ squareLength / 2, squareLength / 2, 0]
  14606. </li>
  14607. <li>
  14608. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14609. </li>
  14610. <li>
  14611. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14612. </li>
  14613. </ul>
  14614. <li>
  14615. for all the other flags, number of input points must be &gt;= 4 and object points can be in any configuration.
  14616. Only 1 solution is returned.
  14617. </li>
  14618. </ul></div>
  14619. <dl>
  14620. <dt><span class="paramLabel">Parameters:</span></dt>
  14621. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
  14622. 1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can be also passed here.</dd>
  14623. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  14624. where N is the number of points. vector&lt;Point2d&gt; can be also passed here.</dd>
  14625. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  14626. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  14627. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  14628. assumed.</dd>
  14629. <dd><code>rvecs</code> - Vector of output rotation vectors (see REF: Rodrigues ) that, together with tvecs, brings points from
  14630. the model coordinate system to the camera coordinate system.</dd>
  14631. <dd><code>tvecs</code> - Vector of output translation vectors.</dd>
  14632. <dd><code>useExtrinsicGuess</code> - Parameter used for #SOLVEPNP_ITERATIVE. If true (1), the function uses
  14633. the provided rvec and tvec values as initial approximations of the rotation and translation
  14634. vectors, respectively, and further optimizes them.</dd>
  14635. <dd><code>flags</code> - Method for solving a PnP problem: see REF: calib3d_solvePnP_flags
  14640. More information is described in REF: calib3d_solvePnP
  14641. <b>Note:</b>
  14642. <ul>
  14643. <li>
  14644. An example of how to use solvePnP for planar augmented reality can be found at
  14645. opencv_source_code/samples/python/plane_ar.py
  14646. </li>
  14647. <li>
  14648. If you are using Python:
  14649. <ul>
  14650. <li>
  14651. Numpy array slices won't work as input because solvePnP requires contiguous
  14652. arrays (enforced by the assertion using cv::Mat::checkVector() around line 55 of
  14653. modules/calib3d/src/solvepnp.cpp version 2.4.9)
  14654. </li>
  14655. <li>
  14656. The P3P algorithm requires image points to be in an array of shape (N,1,2) due
  14657. to its calling of #undistortPoints (around line 75 of modules/calib3d/src/solvepnp.cpp version 2.4.9)
  14658. which requires 2-channel information.
  14659. </li>
  14660. <li>
  14661. Thus, given some data D = np.array(...) where D.shape = (N,M), in order to use a subset of
  14662. it as, e.g., imagePoints, one must effectively copy it into a new array: imagePoints =
  14663. np.ascontiguousarray(D[:,:2]).reshape((N,1,2))
  14664. </li>
  14665. </ul>
  14666. <li>
  14667. The methods REF: SOLVEPNP_DLS and REF: SOLVEPNP_UPNP cannot be used as the current implementations are
  14668. unstable and sometimes give completely wrong results. If you pass one of these two
  14669. flags, REF: SOLVEPNP_EPNP method will be used instead.
  14670. </li>
  14671. <li>
  14672. The minimum number of points is 4 in the general case. In the case of REF: SOLVEPNP_P3P and REF: SOLVEPNP_AP3P
  14673. methods, it is required to use exactly 4 points (the first 3 points are used to estimate all the solutions
  14674. of the P3P problem, the last one is used to retain the best solution that minimizes the reprojection error).
  14675. </li>
  14676. <li>
  14677. With REF: SOLVEPNP_ITERATIVE method and <code>useExtrinsicGuess=true</code>, the minimum number of points is 3 (3 points
  14678. are sufficient to compute a pose but there are up to 4 solutions). The initial solution should be close to the
  14679. global solution to converge.
  14680. </li>
  14681. <li>
  14682. With REF: SOLVEPNP_IPPE input points must be &gt;= 4 and object points must be coplanar.
  14683. </li>
  14684. <li>
  14685. With REF: SOLVEPNP_IPPE_SQUARE this is a special case suitable for marker pose estimation.
  14686. Number of input points must be 4. Object points must be defined in the following order:
  14687. <ul>
  14688. <li>
  14689. point 0: [-squareLength / 2, squareLength / 2, 0]
  14690. </li>
  14691. <li>
  14692. point 1: [ squareLength / 2, squareLength / 2, 0]
  14693. </li>
  14694. <li>
  14695. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14696. </li>
  14697. <li>
  14698. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14699. </li>
  14700. </ul>
  14701. </li>
  14702. </ul></dd>
  14703. <dt><span class="returnLabel">Returns:</span></dt>
  14704. <dd>automatically generated</dd>
  14705. </dl>
  14706. </li>
  14707. </ul>
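<div class="block"><p>A short sketch (illustrative only, not part of the generated API text) of selecting the solver
through the <code>flags</code> argument. With exactly 3 correspondences and REF: SOLVEPNP_AP3P, anywhere from 0 to 4
candidate poses may come back; the input Mats are hypothetical and assumed prepared as in the earlier examples.</p>
<pre><code>import java.util.ArrayList;
import java.util.List;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;

public class SolvePnPAp3pExample {
    /** Runs the AP3P solver on 3 (or 4) correspondences and returns the number of candidate poses. */
    public static int ap3pCandidates(Mat objectPoints, Mat imagePoints,
                                     Mat cameraMatrix, Mat distCoeffs) {
        List&lt;Mat&gt; rvecs = new ArrayList&lt;&gt;();
        List&lt;Mat&gt; tvecs = new ArrayList&lt;&gt;();
        int n = Calib3d.solvePnPGeneric(objectPoints, imagePoints, cameraMatrix, distCoeffs,
                rvecs, tvecs,
                false,                      // no extrinsic guess
                Calib3d.SOLVEPNP_AP3P);     // P3P-family solver: 3 or 4 points
        return n;
    }
}
</code></pre>
</div>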
  14708. <a name="solvePnPGeneric-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-boolean-int-org.opencv.core.Mat-">
  14709. <!-- -->
  14710. </a>
  14711. <ul class="blockList">
  14712. <li class="blockList">
  14713. <h4>solvePnPGeneric</h4>
  14714. <pre>public static&nbsp;int&nbsp;solvePnPGeneric(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  14715. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  14716. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  14717. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  14718. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  14719. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  14720. boolean&nbsp;useExtrinsicGuess,
  14721. int&nbsp;flags,
  14722. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec)</pre>
  14723. <div class="block">Finds an object pose from 3D-2D point correspondences.
  14724. SEE: REF: calib3d_solvePnP
  14725. This function returns a list of all the possible solutions (a solution is a &lt;rotation vector, translation vector&gt;
  14726. couple), depending on the number of input points and the chosen method:
  14727. <ul>
  14728. <li>
  14729. P3P methods (REF: SOLVEPNP_P3P, REF: SOLVEPNP_AP3P): 3 or 4 input points. Number of returned solutions can be between 0 and 4 with 3 input points.
  14730. </li>
  14731. <li>
  14732. REF: SOLVEPNP_IPPE Input points must be &gt;= 4 and object points must be coplanar. Returns 2 solutions.
  14733. </li>
  14734. <li>
  14735. REF: SOLVEPNP_IPPE_SQUARE Special case suitable for marker pose estimation.
  14736. Number of input points must be 4 and 2 solutions are returned. Object points must be defined in the following order:
  14737. <ul>
  14738. <li>
  14739. point 0: [-squareLength / 2, squareLength / 2, 0]
  14740. </li>
  14741. <li>
  14742. point 1: [ squareLength / 2, squareLength / 2, 0]
  14743. </li>
  14744. <li>
  14745. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14746. </li>
  14747. <li>
  14748. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14749. </li>
  14750. </ul>
  14751. <li>
  14752. for all the other flags, number of input points must be &gt;= 4 and object points can be in any configuration.
  14753. Only 1 solution is returned.
  14754. </li>
  14755. </ul></div>
  14756. <dl>
  14757. <dt><span class="paramLabel">Parameters:</span></dt>
  14758. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
  14759. 1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can be also passed here.</dd>
  14760. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  14761. where N is the number of points. vector&lt;Point2d&gt; can be also passed here.</dd>
  14762. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  14763. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  14764. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  14765. assumed.</dd>
  14766. <dd><code>rvecs</code> - Vector of output rotation vectors (see REF: Rodrigues ) that, together with tvecs, brings points from
  14767. the model coordinate system to the camera coordinate system.</dd>
  14768. <dd><code>tvecs</code> - Vector of output translation vectors.</dd>
  14769. <dd><code>useExtrinsicGuess</code> - Parameter used for #SOLVEPNP_ITERATIVE. If true (1), the function uses
  14770. the provided rvec and tvec values as initial approximations of the rotation and translation
  14771. vectors, respectively, and further optimizes them.</dd>
  14772. <dd><code>flags</code> - Method for solving a PnP problem: see REF: calib3d_solvePnP_flags</dd>
  14773. <dd><code>rvec</code> - Rotation vector used to initialize an iterative PnP refinement algorithm, when flag is REF: SOLVEPNP_ITERATIVE
  14774. and useExtrinsicGuess is set to true.
  14778. More information is described in REF: calib3d_solvePnP
  14779. <b>Note:</b>
  14780. <ul>
  14781. <li>
  14782. An example of how to use solvePnP for planar augmented reality can be found at
  14783. opencv_source_code/samples/python/plane_ar.py
  14784. </li>
  14785. <li>
  14786. If you are using Python:
  14787. <ul>
  14788. <li>
  14789. Numpy array slices won't work as input because solvePnP requires contiguous
  14790. arrays (enforced by the assertion using cv::Mat::checkVector() around line 55 of
  14791. modules/calib3d/src/solvepnp.cpp version 2.4.9)
  14792. </li>
  14793. <li>
  14794. The P3P algorithm requires image points to be in an array of shape (N,1,2) due
  14795. to its calling of #undistortPoints (around line 75 of modules/calib3d/src/solvepnp.cpp version 2.4.9)
  14796. which requires 2-channel information.
  14797. </li>
  14798. <li>
  14799. Thus, given some data D = np.array(...) where D.shape = (N,M), in order to use a subset of
  14800. it as, e.g., imagePoints, one must effectively copy it into a new array: imagePoints =
  14801. np.ascontiguousarray(D[:,:2]).reshape((N,1,2))
  14802. </li>
  14803. </ul>
  14804. <li>
  14805. The methods REF: SOLVEPNP_DLS and REF: SOLVEPNP_UPNP cannot be used as the current implementations are
  14806. unstable and sometimes give completely wrong results. If you pass one of these two
  14807. flags, REF: SOLVEPNP_EPNP method will be used instead.
  14808. </li>
  14809. <li>
  14810. The minimum number of points is 4 in the general case. In the case of REF: SOLVEPNP_P3P and REF: SOLVEPNP_AP3P
  14811. methods, it is required to use exactly 4 points (the first 3 points are used to estimate all the solutions
  14812. of the P3P problem, the last one is used to retain the best solution that minimizes the reprojection error).
  14813. </li>
  14814. <li>
  14815. With REF: SOLVEPNP_ITERATIVE method and <code>useExtrinsicGuess=true</code>, the minimum number of points is 3 (3 points
  14816. are sufficient to compute a pose but there are up to 4 solutions). The initial solution should be close to the
  14817. global solution to converge.
  14818. </li>
  14819. <li>
  14820. With REF: SOLVEPNP_IPPE input points must be &gt;= 4 and object points must be coplanar.
  14821. </li>
  14822. <li>
  14823. With REF: SOLVEPNP_IPPE_SQUARE this is a special case suitable for marker pose estimation.
  14824. Number of input points must be 4. Object points must be defined in the following order:
  14825. <ul>
  14826. <li>
  14827. point 0: [-squareLength / 2, squareLength / 2, 0]
  14828. </li>
  14829. <li>
  14830. point 1: [ squareLength / 2, squareLength / 2, 0]
  14831. </li>
  14832. <li>
  14833. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14834. </li>
  14835. <li>
  14836. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14837. </li>
  14838. </ul>
  14839. </li>
  14840. </ul></dd>
  14841. <dt><span class="returnLabel">Returns:</span></dt>
  14842. <dd>automatically generated</dd>
  14843. </dl>
  14844. </li>
  14845. </ul>
  14846. <a name="solvePnPGeneric-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-boolean-int-org.opencv.core.Mat-org.opencv.core.Mat-">
  14847. <!-- -->
  14848. </a>
  14849. <ul class="blockList">
  14850. <li class="blockList">
  14851. <h4>solvePnPGeneric</h4>
  14852. <pre>public static&nbsp;int&nbsp;solvePnPGeneric(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  14853. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  14854. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  14855. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  14856. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  14857. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  14858. boolean&nbsp;useExtrinsicGuess,
  14859. int&nbsp;flags,
  14860. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  14861. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec)</pre>
  14862. <div class="block">Finds an object pose from 3D-2D point correspondences.
  14863. SEE: REF: calib3d_solvePnP
  14864. This function returns a list of all the possible solutions (a solution is a &lt;rotation vector, translation vector&gt;
  14865. couple), depending on the number of input points and the chosen method:
  14866. <ul>
  14867. <li>
  14868. P3P methods (REF: SOLVEPNP_P3P, REF: SOLVEPNP_AP3P): 3 or 4 input points. Number of returned solutions can be between 0 and 4 with 3 input points.
  14869. </li>
  14870. <li>
  14871. REF: SOLVEPNP_IPPE Input points must be &gt;= 4 and object points must be coplanar. Returns 2 solutions.
  14872. </li>
  14873. <li>
  14874. REF: SOLVEPNP_IPPE_SQUARE Special case suitable for marker pose estimation.
  14875. Number of input points must be 4 and 2 solutions are returned. Object points must be defined in the following order:
  14876. <ul>
  14877. <li>
  14878. point 0: [-squareLength / 2, squareLength / 2, 0]
  14879. </li>
  14880. <li>
  14881. point 1: [ squareLength / 2, squareLength / 2, 0]
  14882. </li>
  14883. <li>
  14884. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14885. </li>
  14886. <li>
  14887. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14888. </li>
  14889. </ul>
  14890. <li>
  14891. for all the other flags, number of input points must be &gt;= 4 and object points can be in any configuration.
  14892. Only 1 solution is returned.
  14893. </li>
  14894. </ul></div>
  14895. <dl>
  14896. <dt><span class="paramLabel">Parameters:</span></dt>
  14897. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
  14898. 1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can be also passed here.</dd>
  14899. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  14900. where N is the number of points. vector&lt;Point2d&gt; can be also passed here.</dd>
  14901. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  14902. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  14903. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  14904. assumed.</dd>
  14905. <dd><code>rvecs</code> - Vector of output rotation vectors (see REF: Rodrigues ) that, together with tvecs, brings points from
  14906. the model coordinate system to the camera coordinate system.</dd>
  14907. <dd><code>tvecs</code> - Vector of output translation vectors.</dd>
  14908. <dd><code>useExtrinsicGuess</code> - Parameter used for #SOLVEPNP_ITERATIVE. If true (1), the function uses
  14909. the provided rvec and tvec values as initial approximations of the rotation and translation
  14910. vectors, respectively, and further optimizes them.</dd>
  14911. <dd><code>flags</code> - Method for solving a PnP problem: see REF: calib3d_solvePnP_flags</dd>
  14912. <dd><code>rvec</code> - Rotation vector used to initialize an iterative PnP refinement algorithm, when flag is REF: SOLVEPNP_ITERATIVE
  14913. and useExtrinsicGuess is set to true.</dd>
  14914. <dd><code>tvec</code> - Translation vector used to initialize an iterative PnP refinement algorithm, when flag is REF: SOLVEPNP_ITERATIVE
  14915. and useExtrinsicGuess is set to true.
  14918. More information is described in REF: calib3d_solvePnP
  14919. <b>Note:</b>
  14920. <ul>
  14921. <li>
  14922. An example of how to use solvePnP for planar augmented reality can be found at
  14923. opencv_source_code/samples/python/plane_ar.py
  14924. </li>
  14925. <li>
  14926. If you are using Python:
  14927. <ul>
  14928. <li>
  14929. Numpy array slices won't work as input because solvePnP requires contiguous
  14930. arrays (enforced by the assertion using cv::Mat::checkVector() around line 55 of
  14931. modules/calib3d/src/solvepnp.cpp version 2.4.9)
  14932. </li>
  14933. <li>
  14934. The P3P algorithm requires image points to be in an array of shape (N,1,2) due
  14935. to its calling of #undistortPoints (around line 75 of modules/calib3d/src/solvepnp.cpp version 2.4.9)
  14936. which requires 2-channel information.
  14937. </li>
  14938. <li>
  14939. Thus, given some data D = np.array(...) where D.shape = (N,M), in order to use a subset of
  14940. it as, e.g., imagePoints, one must effectively copy it into a new array: imagePoints =
  14941. np.ascontiguousarray(D[:,:2]).reshape((N,1,2))
  14942. </li>
  14943. </ul>
  14944. <li>
  14945. The methods REF: SOLVEPNP_DLS and REF: SOLVEPNP_UPNP cannot be used as the current implementations are
  14946. unstable and sometimes give completely wrong results. If you pass one of these two
  14947. flags, REF: SOLVEPNP_EPNP method will be used instead.
  14948. </li>
  14949. <li>
  14950. The minimum number of points is 4 in the general case. In the case of REF: SOLVEPNP_P3P and REF: SOLVEPNP_AP3P
  14951. methods, it is required to use exactly 4 points (the first 3 points are used to estimate all the solutions
  14952. of the P3P problem, the last one is used to retain the best solution that minimizes the reprojection error).
  14953. </li>
  14954. <li>
  14955. With REF: SOLVEPNP_ITERATIVE method and <code>useExtrinsicGuess=true</code>, the minimum number of points is 3 (3 points
  14956. are sufficient to compute a pose but there are up to 4 solutions). The initial solution should be close to the
  14957. global solution to converge.
  14958. </li>
  14959. <li>
  14960. With REF: SOLVEPNP_IPPE input points must be &gt;= 4 and object points must be coplanar.
  14961. </li>
  14962. <li>
  14963. With REF: SOLVEPNP_IPPE_SQUARE this is a special case suitable for marker pose estimation.
  14964. Number of input points must be 4. Object points must be defined in the following order:
  14965. <ul>
  14966. <li>
  14967. point 0: [-squareLength / 2, squareLength / 2, 0]
  14968. </li>
  14969. <li>
  14970. point 1: [ squareLength / 2, squareLength / 2, 0]
  14971. </li>
  14972. <li>
  14973. point 2: [ squareLength / 2, -squareLength / 2, 0]
  14974. </li>
  14975. <li>
  14976. point 3: [-squareLength / 2, -squareLength / 2, 0]
  14977. </li>
  14978. </ul>
  14979. </li>
  14980. </ul></dd>
  14981. <dt><span class="returnLabel">Returns:</span></dt>
  14982. <dd>automatically generated</dd>
  14983. </dl>
  14984. </li>
  14985. </ul>
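<div class="block"><p>A sketch (illustrative only, not part of the generated API text) of seeding the iterative
refinement of this overload with a previous pose estimate. <code>rvecPrev</code> and <code>tvecPrev</code> are
hypothetical Mats holding the earlier estimate; the other inputs are assumed prepared as above.</p>
<pre><code>import java.util.List;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;

public class SolvePnPWithGuess {
    /** Refines a pose starting from a previous estimate (rvecPrev, tvecPrev). */
    public static boolean refine(Mat objectPoints, Mat imagePoints,
                                 Mat cameraMatrix, Mat distCoeffs,
                                 Mat rvecPrev, Mat tvecPrev,
                                 List&lt;Mat&gt; rvecs, List&lt;Mat&gt; tvecs) {
        int n = Calib3d.solvePnPGeneric(objectPoints, imagePoints, cameraMatrix, distCoeffs,
                rvecs, tvecs,
                true,                         // useExtrinsicGuess: start from rvecPrev/tvecPrev
                Calib3d.SOLVEPNP_ITERATIVE,
                rvecPrev, tvecPrev);          // initial guess used by the refinement
        return n &gt; 0;                      // SOLVEPNP_ITERATIVE yields a single refined pose
    }
}
</code></pre>
</div>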
  14986. <a name="solvePnPGeneric-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-boolean-int-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  14987. <!-- -->
  14988. </a>
  14989. <ul class="blockList">
  14990. <li class="blockList">
  14991. <h4>solvePnPGeneric</h4>
  14992. <pre>public static&nbsp;int&nbsp;solvePnPGeneric(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  14993. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  14994. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  14995. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  14996. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  14997. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  14998. boolean&nbsp;useExtrinsicGuess,
  14999. int&nbsp;flags,
  15000. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15001. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  15002. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;reprojectionError)</pre>
  15003. <div class="block">Finds an object pose from 3D-2D point correspondences.
  15004. SEE: REF: calib3d_solvePnP
  15005. This function returns a list of all the possible solutions (a solution is a &lt;rotation vector, translation vector&gt;
  15006. couple), depending on the number of input points and the chosen method:
  15007. <ul>
  15008. <li>
  15009. P3P methods (REF: SOLVEPNP_P3P, REF: SOLVEPNP_AP3P): 3 or 4 input points. Number of returned solutions can be between 0 and 4 with 3 input points.
  15010. </li>
  15011. <li>
  15012. REF: SOLVEPNP_IPPE Input points must be &gt;= 4 and object points must be coplanar. Returns 2 solutions.
  15013. </li>
  15014. <li>
  15015. REF: SOLVEPNP_IPPE_SQUARE Special case suitable for marker pose estimation.
  15016. Number of input points must be 4 and 2 solutions are returned. Object points must be defined in the following order:
  15017. <ul>
  15018. <li>
  15019. point 0: [-squareLength / 2, squareLength / 2, 0]
  15020. </li>
  15021. <li>
  15022. point 1: [ squareLength / 2, squareLength / 2, 0]
  15023. </li>
  15024. <li>
  15025. point 2: [ squareLength / 2, -squareLength / 2, 0]
  15026. </li>
  15027. <li>
  15028. point 3: [-squareLength / 2, -squareLength / 2, 0]
  15029. </li>
  15030. </ul>
  15031. <li>
  15032. for all the other flags, number of input points must be &gt;= 4 and object points can be in any configuration.
  15033. Only 1 solution is returned.
  15034. </li>
  15035. </ul></div>
  15036. <dl>
  15037. <dt><span class="paramLabel">Parameters:</span></dt>
  15038. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
  15039. 1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can be also passed here.</dd>
  15040. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  15041. where N is the number of points. vector&lt;Point2d&gt; can be also passed here.</dd>
  15042. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  15043. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  15044. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  15045. assumed.</dd>
  15046. <dd><code>rvecs</code> - Vector of output rotation vectors (see REF: Rodrigues ) that, together with tvecs, brings points from
  15047. the model coordinate system to the camera coordinate system.</dd>
  15048. <dd><code>tvecs</code> - Vector of output translation vectors.</dd>
  15049. <dd><code>useExtrinsicGuess</code> - Parameter used for #SOLVEPNP_ITERATIVE. If true (1), the function uses
  15050. the provided rvec and tvec values as initial approximations of the rotation and translation
  15051. vectors, respectively, and further optimizes them.</dd>
  15052. <dd><code>flags</code> - Method for solving a PnP problem: see REF: calib3d_solvePnP_flags</dd>
  15053. <dd><code>rvec</code> - Rotation vector used to initialize an iterative PnP refinement algorithm, when flag is REF: SOLVEPNP_ITERATIVE
  15054. and useExtrinsicGuess is set to true.</dd>
  15055. <dd><code>tvec</code> - Translation vector used to initialize an iterative PnP refinement algorithm, when flag is REF: SOLVEPNP_ITERATIVE
  15056. and useExtrinsicGuess is set to true.</dd>
  15057. <dd><code>reprojectionError</code> - Optional vector of reprojection error, that is the RMS error
  15058. (\( \text{RMSE} = \sqrt{\frac{\sum_{i}^{N} \left ( \hat{y_i} - y_i \right )^2}{N}} \)) between the input image points
  15059. and the 3D object points projected with the estimated pose.
  15060. More information is described in REF: calib3d_solvePnP
  15061. <b>Note:</b>
  15062. <ul>
  15063. <li>
  15064. An example of how to use solvePnP for planar augmented reality can be found at
  15065. opencv_source_code/samples/python/plane_ar.py
  15066. </li>
  15067. <li>
  15068. If you are using Python:
  15069. <ul>
  15070. <li>
  15071. Numpy array slices won't work as input because solvePnP requires contiguous
  15072. arrays (enforced by the assertion using cv::Mat::checkVector() around line 55 of
  15073. modules/calib3d/src/solvepnp.cpp version 2.4.9)
  15074. </li>
  15075. <li>
  15076. The P3P algorithm requires image points to be in an array of shape (N,1,2) due
  15077. to its calling of #undistortPoints (around line 75 of modules/calib3d/src/solvepnp.cpp version 2.4.9)
  15078. which requires 2-channel information.
  15079. </li>
  15080. <li>
  15081. Thus, given some data D = np.array(...) where D.shape = (N,M), in order to use a subset of
  15082. it as, e.g., imagePoints, one must effectively copy it into a new array: imagePoints =
  15083. np.ascontiguousarray(D[:,:2]).reshape((N,1,2))
  15084. </li>
  15085. </ul>
  15086. <li>
  15087. The methods REF: SOLVEPNP_DLS and REF: SOLVEPNP_UPNP cannot be used as the current implementations are
  15088. unstable and sometimes give completely wrong results. If you pass one of these two
  15089. flags, REF: SOLVEPNP_EPNP method will be used instead.
  15090. </li>
  15091. <li>
  15092. The minimum number of points is 4 in the general case. In the case of REF: SOLVEPNP_P3P and REF: SOLVEPNP_AP3P
  15093. methods, it is required to use exactly 4 points (the first 3 points are used to estimate all the solutions
  15094. of the P3P problem, the last one is used to retain the best solution that minimizes the reprojection error).
  15095. </li>
  15096. <li>
  15097. With REF: SOLVEPNP_ITERATIVE method and <code>useExtrinsicGuess=true</code>, the minimum number of points is 3 (3 points
  15098. are sufficient to compute a pose but there are up to 4 solutions). The initial solution should be close to the
  15099. global solution to converge.
  15100. </li>
  15101. <li>
  15102. With REF: SOLVEPNP_IPPE input points must be &gt;= 4 and object points must be coplanar.
  15103. </li>
  15104. <li>
  15105. With REF: SOLVEPNP_IPPE_SQUARE this is a special case suitable for marker pose estimation.
  15106. Number of input points must be 4. Object points must be defined in the following order:
  15107. <ul>
  15108. <li>
  15109. point 0: [-squareLength / 2, squareLength / 2, 0]
  15110. </li>
  15111. <li>
  15112. point 1: [ squareLength / 2, squareLength / 2, 0]
  15113. </li>
  15114. <li>
  15115. point 2: [ squareLength / 2, -squareLength / 2, 0]
  15116. </li>
  15117. <li>
  15118. point 3: [-squareLength / 2, -squareLength / 2, 0]
  15119. </li>
  15120. </ul>
  15121. </li>
  15122. </ul></dd>
  15123. <dt><span class="returnLabel">Returns:</span></dt>
  15124. <dd>automatically generated</dd>
  15125. </dl>
  15126. </li>
  15127. </ul>
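<div class="block"><p>A sketch (illustrative only, not part of the generated API text) of ranking the returned
candidates with the optional <code>reprojectionError</code> output of this overload. The error layout is assumed to be
one RMS value per solution, indexed row-wise; the input Mats are assumed prepared as in the earlier examples.</p>
<pre><code>import java.util.ArrayList;
import java.util.List;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;

public class BestPnPSolution {
    /** Returns the index of the candidate pose with the smallest RMS reprojection error, or -1. */
    public static int bestSolution(Mat objectPoints, Mat imagePoints,
                                   Mat cameraMatrix, Mat distCoeffs, int flags) {
        List&lt;Mat&gt; rvecs = new ArrayList&lt;&gt;();
        List&lt;Mat&gt; tvecs = new ArrayList&lt;&gt;();
        Mat reprojectionError = new Mat();
        int n = Calib3d.solvePnPGeneric(objectPoints, imagePoints, cameraMatrix, distCoeffs,
                rvecs, tvecs, false, flags,
                new Mat(), new Mat(),        // no initial guess (useExtrinsicGuess is false)
                reprojectionError);
        int best = -1;
        double bestErr = Double.MAX_VALUE;
        for (int i = 0; i &lt; n; i++) {
            double err = reprojectionError.get(i, 0)[0]; // RMSE of solution i
            if (err &lt; bestErr) {
                bestErr = err;
                best = i;
            }
        }
        return best;
    }
}
</code></pre>
</div>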
  15128. <a name="solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-">
  15129. <!-- -->
  15130. </a>
  15131. <ul class="blockList">
  15132. <li class="blockList">
  15133. <h4>solvePnPRansac</h4>
  15134. <pre>public static&nbsp;boolean&nbsp;solvePnPRansac(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  15135. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
  15136. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  15137. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  15138. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15139. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec)</pre>
  15140. <div class="block">Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.
  15141. SEE: REF: calib3d_solvePnP</div>
  15142. <dl>
  15143. <dt><span class="paramLabel">Parameters:</span></dt>
  15144. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
  15145. 1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can be also passed here.</dd>
  15146. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  15147. where N is the number of points. vector&lt;Point2d&gt; can be also passed here.</dd>
  15148. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  15149. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  15150. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  15151. assumed.</dd>
  15152. <dd><code>rvec</code> - Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  15153. the model coordinate system to the camera coordinate system.</dd>
  15154. <dd><code>tvec</code> - Output translation vector.
  15159. The function estimates an object pose given a set of object points, their corresponding image
  15160. projections, as well as the camera intrinsic matrix and the distortion coefficients. This function finds such
  15161. a pose that minimizes reprojection error, that is, the sum of squared distances between the observed
  15162. projections imagePoints and the projected (using REF: projectPoints ) objectPoints. The use of RANSAC
  15163. makes the function resistant to outliers.
  15164. <b>Note:</b>
  15165. <ul>
  15166. <li>
  15167. An example of how to use solvePNPRansac for object detection can be found at
  15168. opencv_source_code/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/
  15169. </li>
  15170. <li>
  15171. The default method used to estimate the camera pose for the Minimal Sample Sets step
  15172. is #SOLVEPNP_EPNP. Exceptions are:
  15173. <ul>
  15174. <li>
  15175. if you choose #SOLVEPNP_P3P or #SOLVEPNP_AP3P, these methods will be used.
  15176. </li>
  15177. <li>
  15178. if the number of input points is equal to 4, #SOLVEPNP_P3P is used.
  15179. </li>
  15180. </ul>
</li>
<li>
The method used to estimate the camera pose using all the inliers is defined by the
flags parameter unless it is equal to #SOLVEPNP_P3P or #SOLVEPNP_AP3P. In this case,
  15184. the method #SOLVEPNP_EPNP will be used instead.
  15185. </li>
  15186. </ul></dd>
  15187. <dt><span class="returnLabel">Returns:</span></dt>
  15188. <dd>automatically generated</dd>
  15189. </dl>
  15190. </li>
  15191. </ul>
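<div class="block">A minimal usage sketch for the overload above, assuming the OpenCV native library is already
loaded; the 3D-2D correspondences and the intrinsics below are placeholder values, not data from the library.</div>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;

// Six known 3D model points and their detected 2D projections (placeholders).
MatOfPoint3f objectPoints = new MatOfPoint3f(
        new Point3(0, 0, 0), new Point3(1, 0, 0), new Point3(1, 1, 0),
        new Point3(0, 1, 0), new Point3(0, 0, 1), new Point3(1, 0, 1));
MatOfPoint2f imagePoints = new MatOfPoint2f(
        new Point(320, 240), new Point(400, 238), new Point(405, 320),
        new Point(322, 318), new Point(318, 200), new Point(402, 198));

// Pinhole intrinsics assumed known from a prior calibration.
Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64F);
cameraMatrix.put(0, 0, 800.0);   // fx
cameraMatrix.put(1, 1, 800.0);   // fy
cameraMatrix.put(0, 2, 320.0);   // cx
cameraMatrix.put(1, 2, 240.0);   // cy
MatOfDouble distCoeffs = new MatOfDouble();   // empty: zero distortion assumed

Mat rvec = new Mat();
Mat tvec = new Mat();
boolean ok = Calib3d.solvePnPRansac(objectPoints, imagePoints,
        cameraMatrix, distCoeffs, rvec, tvec);
</pre>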
  15192. <a name="solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-">
  15193. <!-- -->
  15194. </a>
  15195. <ul class="blockList">
  15196. <li class="blockList">
  15197. <h4>solvePnPRansac</h4>
  15198. <pre>public static&nbsp;boolean&nbsp;solvePnPRansac(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  15199. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
  15200. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  15201. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  15202. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15203. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  15204. boolean&nbsp;useExtrinsicGuess)</pre>
  15205. <div class="block">Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.
  15206. SEE: REF: calib3d_solvePnP</div>
  15207. <dl>
  15208. <dt><span class="paramLabel">Parameters:</span></dt>
  15209. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
  15210. 1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can be also passed here.</dd>
  15211. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  15212. where N is the number of points. vector&lt;Point2d&gt; can be also passed here.</dd>
  15213. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  15214. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  15215. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  15216. assumed.</dd>
  15217. <dd><code>rvec</code> - Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  15218. the model coordinate system to the camera coordinate system.</dd>
  15219. <dd><code>tvec</code> - Output translation vector.</dd>
  15220. <dd><code>useExtrinsicGuess</code> - Parameter used for REF: SOLVEPNP_ITERATIVE. If true (1), the function uses
  15221. the provided rvec and tvec values as initial approximations of the rotation and translation
vectors, respectively, and further optimizes them. The inlier threshold used by the RANSAC procedure
is the maximum allowed distance between the observed and computed point projections to consider
a point an inlier.
  15225. The function estimates an object pose given a set of object points, their corresponding image
  15226. projections, as well as the camera intrinsic matrix and the distortion coefficients. This function finds such
  15227. a pose that minimizes reprojection error, that is, the sum of squared distances between the observed
  15228. projections imagePoints and the projected (using REF: projectPoints ) objectPoints. The use of RANSAC
  15229. makes the function resistant to outliers.
  15230. <b>Note:</b>
  15231. <ul>
  15232. <li>
An example of how to use solvePnPRansac for object detection can be found at
  15234. opencv_source_code/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/
  15235. </li>
  15236. <li>
  15237. The default method used to estimate the camera pose for the Minimal Sample Sets step
  15238. is #SOLVEPNP_EPNP. Exceptions are:
  15239. <ul>
  15240. <li>
  15241. if you choose #SOLVEPNP_P3P or #SOLVEPNP_AP3P, these methods will be used.
  15242. </li>
  15243. <li>
  15244. if the number of input points is equal to 4, #SOLVEPNP_P3P is used.
  15245. </li>
  15246. </ul>
</li>
<li>
The method used to estimate the camera pose using all the inliers is defined by the
flags parameter unless it is equal to #SOLVEPNP_P3P or #SOLVEPNP_AP3P. In this case,
  15250. the method #SOLVEPNP_EPNP will be used instead.
  15251. </li>
  15252. </ul></dd>
  15253. <dt><span class="returnLabel">Returns:</span></dt>
  15254. <dd>automatically generated</dd>
  15255. </dl>
  15256. </li>
  15257. </ul>
  15258. <a name="solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-int-">
  15259. <!-- -->
  15260. </a>
  15261. <ul class="blockList">
  15262. <li class="blockList">
  15263. <h4>solvePnPRansac</h4>
  15264. <pre>public static&nbsp;boolean&nbsp;solvePnPRansac(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  15265. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
  15266. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  15267. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  15268. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15269. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  15270. boolean&nbsp;useExtrinsicGuess,
  15271. int&nbsp;iterationsCount)</pre>
  15272. <div class="block">Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.
  15273. SEE: REF: calib3d_solvePnP</div>
  15274. <dl>
  15275. <dt><span class="paramLabel">Parameters:</span></dt>
  15276. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
  15277. 1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can be also passed here.</dd>
  15278. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  15279. where N is the number of points. vector&lt;Point2d&gt; can be also passed here.</dd>
  15280. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  15281. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  15282. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  15283. assumed.</dd>
  15284. <dd><code>rvec</code> - Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  15285. the model coordinate system to the camera coordinate system.</dd>
  15286. <dd><code>tvec</code> - Output translation vector.</dd>
  15287. <dd><code>useExtrinsicGuess</code> - Parameter used for REF: SOLVEPNP_ITERATIVE. If true (1), the function uses
  15288. the provided rvec and tvec values as initial approximations of the rotation and translation
  15289. vectors, respectively, and further optimizes them.</dd>
<dd><code>iterationsCount</code> - Number of iterations. The inlier threshold used by the RANSAC procedure
is the maximum allowed distance between the observed and computed point projections to consider
a point an inlier.
  15293. The function estimates an object pose given a set of object points, their corresponding image
  15294. projections, as well as the camera intrinsic matrix and the distortion coefficients. This function finds such
  15295. a pose that minimizes reprojection error, that is, the sum of squared distances between the observed
  15296. projections imagePoints and the projected (using REF: projectPoints ) objectPoints. The use of RANSAC
  15297. makes the function resistant to outliers.
  15298. <b>Note:</b>
  15299. <ul>
  15300. <li>
An example of how to use solvePnPRansac for object detection can be found at
  15302. opencv_source_code/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/
  15303. </li>
  15304. <li>
  15305. The default method used to estimate the camera pose for the Minimal Sample Sets step
  15306. is #SOLVEPNP_EPNP. Exceptions are:
  15307. <ul>
  15308. <li>
  15309. if you choose #SOLVEPNP_P3P or #SOLVEPNP_AP3P, these methods will be used.
  15310. </li>
  15311. <li>
  15312. if the number of input points is equal to 4, #SOLVEPNP_P3P is used.
  15313. </li>
  15314. </ul>
</li>
<li>
The method used to estimate the camera pose using all the inliers is defined by the
flags parameter unless it is equal to #SOLVEPNP_P3P or #SOLVEPNP_AP3P. In this case,
  15318. the method #SOLVEPNP_EPNP will be used instead.
  15319. </li>
  15320. </ul></dd>
  15321. <dt><span class="returnLabel">Returns:</span></dt>
  15322. <dd>automatically generated</dd>
  15323. </dl>
  15324. </li>
  15325. </ul>
  15326. <a name="solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-int-float-">
  15327. <!-- -->
  15328. </a>
  15329. <ul class="blockList">
  15330. <li class="blockList">
  15331. <h4>solvePnPRansac</h4>
  15332. <pre>public static&nbsp;boolean&nbsp;solvePnPRansac(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  15333. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
  15334. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  15335. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  15336. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15337. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  15338. boolean&nbsp;useExtrinsicGuess,
  15339. int&nbsp;iterationsCount,
  15340. float&nbsp;reprojectionError)</pre>
  15341. <div class="block">Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.
  15342. SEE: REF: calib3d_solvePnP</div>
  15343. <dl>
  15344. <dt><span class="paramLabel">Parameters:</span></dt>
  15345. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
  15346. 1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can be also passed here.</dd>
  15347. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  15348. where N is the number of points. vector&lt;Point2d&gt; can be also passed here.</dd>
  15349. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  15350. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  15351. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  15352. assumed.</dd>
  15353. <dd><code>rvec</code> - Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  15354. the model coordinate system to the camera coordinate system.</dd>
  15355. <dd><code>tvec</code> - Output translation vector.</dd>
  15356. <dd><code>useExtrinsicGuess</code> - Parameter used for REF: SOLVEPNP_ITERATIVE. If true (1), the function uses
  15357. the provided rvec and tvec values as initial approximations of the rotation and translation
  15358. vectors, respectively, and further optimizes them.</dd>
  15359. <dd><code>iterationsCount</code> - Number of iterations.</dd>
  15360. <dd><code>reprojectionError</code> - Inlier threshold value used by the RANSAC procedure. The parameter value
  15361. is the maximum allowed distance between the observed and computed point projections to consider it
  15362. an inlier.
  15363. The function estimates an object pose given a set of object points, their corresponding image
  15364. projections, as well as the camera intrinsic matrix and the distortion coefficients. This function finds such
  15365. a pose that minimizes reprojection error, that is, the sum of squared distances between the observed
  15366. projections imagePoints and the projected (using REF: projectPoints ) objectPoints. The use of RANSAC
  15367. makes the function resistant to outliers.
  15368. <b>Note:</b>
  15369. <ul>
  15370. <li>
An example of how to use solvePnPRansac for object detection can be found at
  15372. opencv_source_code/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/
  15373. </li>
  15374. <li>
  15375. The default method used to estimate the camera pose for the Minimal Sample Sets step
  15376. is #SOLVEPNP_EPNP. Exceptions are:
  15377. <ul>
  15378. <li>
  15379. if you choose #SOLVEPNP_P3P or #SOLVEPNP_AP3P, these methods will be used.
  15380. </li>
  15381. <li>
  15382. if the number of input points is equal to 4, #SOLVEPNP_P3P is used.
  15383. </li>
  15384. </ul>
</li>
<li>
The method used to estimate the camera pose using all the inliers is defined by the
flags parameter unless it is equal to #SOLVEPNP_P3P or #SOLVEPNP_AP3P. In this case,
  15388. the method #SOLVEPNP_EPNP will be used instead.
  15389. </li>
  15390. </ul></dd>
  15391. <dt><span class="returnLabel">Returns:</span></dt>
  15392. <dd>automatically generated</dd>
  15393. </dl>
  15394. </li>
  15395. </ul>
  15396. <a name="solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-int-float-double-">
  15397. <!-- -->
  15398. </a>
  15399. <ul class="blockList">
  15400. <li class="blockList">
  15401. <h4>solvePnPRansac</h4>
  15402. <pre>public static&nbsp;boolean&nbsp;solvePnPRansac(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  15403. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
  15404. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  15405. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  15406. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15407. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  15408. boolean&nbsp;useExtrinsicGuess,
  15409. int&nbsp;iterationsCount,
  15410. float&nbsp;reprojectionError,
  15411. double&nbsp;confidence)</pre>
  15412. <div class="block">Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.
  15413. SEE: REF: calib3d_solvePnP</div>
  15414. <dl>
  15415. <dt><span class="paramLabel">Parameters:</span></dt>
  15416. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
  15417. 1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can be also passed here.</dd>
  15418. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  15419. where N is the number of points. vector&lt;Point2d&gt; can be also passed here.</dd>
  15420. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  15421. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  15422. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  15423. assumed.</dd>
  15424. <dd><code>rvec</code> - Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  15425. the model coordinate system to the camera coordinate system.</dd>
  15426. <dd><code>tvec</code> - Output translation vector.</dd>
  15427. <dd><code>useExtrinsicGuess</code> - Parameter used for REF: SOLVEPNP_ITERATIVE. If true (1), the function uses
  15428. the provided rvec and tvec values as initial approximations of the rotation and translation
  15429. vectors, respectively, and further optimizes them.</dd>
  15430. <dd><code>iterationsCount</code> - Number of iterations.</dd>
  15431. <dd><code>reprojectionError</code> - Inlier threshold value used by the RANSAC procedure. The parameter value
  15432. is the maximum allowed distance between the observed and computed point projections to consider it
  15433. an inlier.</dd>
  15434. <dd><code>confidence</code> - The probability that the algorithm produces a useful result.
  15435. The function estimates an object pose given a set of object points, their corresponding image
  15436. projections, as well as the camera intrinsic matrix and the distortion coefficients. This function finds such
  15437. a pose that minimizes reprojection error, that is, the sum of squared distances between the observed
  15438. projections imagePoints and the projected (using REF: projectPoints ) objectPoints. The use of RANSAC
  15439. makes the function resistant to outliers.
  15440. <b>Note:</b>
  15441. <ul>
  15442. <li>
An example of how to use solvePnPRansac for object detection can be found at
  15444. opencv_source_code/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/
  15445. </li>
  15446. <li>
  15447. The default method used to estimate the camera pose for the Minimal Sample Sets step
  15448. is #SOLVEPNP_EPNP. Exceptions are:
  15449. <ul>
  15450. <li>
  15451. if you choose #SOLVEPNP_P3P or #SOLVEPNP_AP3P, these methods will be used.
  15452. </li>
  15453. <li>
  15454. if the number of input points is equal to 4, #SOLVEPNP_P3P is used.
  15455. </li>
  15456. </ul>
</li>
<li>
The method used to estimate the camera pose using all the inliers is defined by the
flags parameter unless it is equal to #SOLVEPNP_P3P or #SOLVEPNP_AP3P. In this case,
  15460. the method #SOLVEPNP_EPNP will be used instead.
  15461. </li>
  15462. </ul></dd>
  15463. <dt><span class="returnLabel">Returns:</span></dt>
  15464. <dd>automatically generated</dd>
  15465. </dl>
  15466. </li>
  15467. </ul>
  15468. <a name="solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-int-float-double-org.opencv.core.Mat-">
  15469. <!-- -->
  15470. </a>
  15471. <ul class="blockList">
  15472. <li class="blockList">
  15473. <h4>solvePnPRansac</h4>
  15474. <pre>public static&nbsp;boolean&nbsp;solvePnPRansac(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  15475. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
  15476. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  15477. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  15478. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15479. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  15480. boolean&nbsp;useExtrinsicGuess,
  15481. int&nbsp;iterationsCount,
  15482. float&nbsp;reprojectionError,
  15483. double&nbsp;confidence,
  15484. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers)</pre>
  15485. <div class="block">Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.
  15486. SEE: REF: calib3d_solvePnP</div>
  15487. <dl>
  15488. <dt><span class="paramLabel">Parameters:</span></dt>
  15489. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
  15490. 1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can be also passed here.</dd>
  15491. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  15492. where N is the number of points. vector&lt;Point2d&gt; can be also passed here.</dd>
  15493. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  15494. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  15495. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  15496. assumed.</dd>
  15497. <dd><code>rvec</code> - Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  15498. the model coordinate system to the camera coordinate system.</dd>
  15499. <dd><code>tvec</code> - Output translation vector.</dd>
  15500. <dd><code>useExtrinsicGuess</code> - Parameter used for REF: SOLVEPNP_ITERATIVE. If true (1), the function uses
  15501. the provided rvec and tvec values as initial approximations of the rotation and translation
  15502. vectors, respectively, and further optimizes them.</dd>
  15503. <dd><code>iterationsCount</code> - Number of iterations.</dd>
  15504. <dd><code>reprojectionError</code> - Inlier threshold value used by the RANSAC procedure. The parameter value
  15505. is the maximum allowed distance between the observed and computed point projections to consider it
  15506. an inlier.</dd>
  15507. <dd><code>confidence</code> - The probability that the algorithm produces a useful result.</dd>
  15508. <dd><code>inliers</code> - Output vector that contains indices of inliers in objectPoints and imagePoints .
  15509. The function estimates an object pose given a set of object points, their corresponding image
  15510. projections, as well as the camera intrinsic matrix and the distortion coefficients. This function finds such
  15511. a pose that minimizes reprojection error, that is, the sum of squared distances between the observed
  15512. projections imagePoints and the projected (using REF: projectPoints ) objectPoints. The use of RANSAC
  15513. makes the function resistant to outliers.
  15514. <b>Note:</b>
  15515. <ul>
  15516. <li>
An example of how to use solvePnPRansac for object detection can be found at
  15518. opencv_source_code/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/
  15519. </li>
  15520. <li>
  15521. The default method used to estimate the camera pose for the Minimal Sample Sets step
  15522. is #SOLVEPNP_EPNP. Exceptions are:
  15523. <ul>
  15524. <li>
  15525. if you choose #SOLVEPNP_P3P or #SOLVEPNP_AP3P, these methods will be used.
  15526. </li>
  15527. <li>
  15528. if the number of input points is equal to 4, #SOLVEPNP_P3P is used.
  15529. </li>
  15530. </ul>
</li>
<li>
The method used to estimate the camera pose using all the inliers is defined by the
flags parameter unless it is equal to #SOLVEPNP_P3P or #SOLVEPNP_AP3P. In this case,
  15534. the method #SOLVEPNP_EPNP will be used instead.
  15535. </li>
  15536. </ul></dd>
  15537. <dt><span class="returnLabel">Returns:</span></dt>
  15538. <dd>automatically generated</dd>
  15539. </dl>
  15540. </li>
  15541. </ul>
  15542. <a name="solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-boolean-int-float-double-org.opencv.core.Mat-int-">
  15543. <!-- -->
  15544. </a>
  15545. <ul class="blockList">
  15546. <li class="blockList">
  15547. <h4>solvePnPRansac</h4>
  15548. <pre>public static&nbsp;boolean&nbsp;solvePnPRansac(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  15549. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
  15550. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  15551. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  15552. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15553. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  15554. boolean&nbsp;useExtrinsicGuess,
  15555. int&nbsp;iterationsCount,
  15556. float&nbsp;reprojectionError,
  15557. double&nbsp;confidence,
  15558. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  15559. int&nbsp;flags)</pre>
  15560. <div class="block">Finds an object pose from 3D-2D point correspondences using the RANSAC scheme.
  15561. SEE: REF: calib3d_solvePnP</div>
  15562. <dl>
  15563. <dt><span class="paramLabel">Parameters:</span></dt>
  15564. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or
  15565. 1xN/Nx1 3-channel, where N is the number of points. vector&lt;Point3d&gt; can be also passed here.</dd>
  15566. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  15567. where N is the number of points. vector&lt;Point2d&gt; can be also passed here.</dd>
  15568. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  15569. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  15570. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  15571. assumed.</dd>
  15572. <dd><code>rvec</code> - Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  15573. the model coordinate system to the camera coordinate system.</dd>
  15574. <dd><code>tvec</code> - Output translation vector.</dd>
  15575. <dd><code>useExtrinsicGuess</code> - Parameter used for REF: SOLVEPNP_ITERATIVE. If true (1), the function uses
  15576. the provided rvec and tvec values as initial approximations of the rotation and translation
  15577. vectors, respectively, and further optimizes them.</dd>
  15578. <dd><code>iterationsCount</code> - Number of iterations.</dd>
  15579. <dd><code>reprojectionError</code> - Inlier threshold value used by the RANSAC procedure. The parameter value
  15580. is the maximum allowed distance between the observed and computed point projections to consider it
  15581. an inlier.</dd>
  15582. <dd><code>confidence</code> - The probability that the algorithm produces a useful result.</dd>
  15583. <dd><code>inliers</code> - Output vector that contains indices of inliers in objectPoints and imagePoints .</dd>
  15584. <dd><code>flags</code> - Method for solving a PnP problem (see REF: solvePnP ).
  15585. The function estimates an object pose given a set of object points, their corresponding image
  15586. projections, as well as the camera intrinsic matrix and the distortion coefficients. This function finds such
  15587. a pose that minimizes reprojection error, that is, the sum of squared distances between the observed
  15588. projections imagePoints and the projected (using REF: projectPoints ) objectPoints. The use of RANSAC
  15589. makes the function resistant to outliers.
  15590. <b>Note:</b>
  15591. <ul>
  15592. <li>
An example of how to use solvePnPRansac for object detection can be found at
  15594. opencv_source_code/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/
  15595. </li>
  15596. <li>
  15597. The default method used to estimate the camera pose for the Minimal Sample Sets step
  15598. is #SOLVEPNP_EPNP. Exceptions are:
  15599. <ul>
  15600. <li>
  15601. if you choose #SOLVEPNP_P3P or #SOLVEPNP_AP3P, these methods will be used.
  15602. </li>
  15603. <li>
  15604. if the number of input points is equal to 4, #SOLVEPNP_P3P is used.
  15605. </li>
  15606. </ul>
</li>
<li>
The method used to estimate the camera pose using all the inliers is defined by the
flags parameter unless it is equal to #SOLVEPNP_P3P or #SOLVEPNP_AP3P. In this case,
  15610. the method #SOLVEPNP_EPNP will be used instead.
  15611. </li>
  15612. </ul></dd>
  15613. <dt><span class="returnLabel">Returns:</span></dt>
  15614. <dd>automatically generated</dd>
  15615. </dl>
  15616. </li>
  15617. </ul>
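<div class="block">A sketch of the fully parameterised overload above, with the RANSAC settings spelled out and
the inlier indices recovered. The threshold, iteration count and confidence are illustrative values only, and
objectPoints, imagePoints, cameraMatrix and distCoeffs are assumed to be prepared as in the earlier sketch.</div>
<pre>
Mat rvec = new Mat();
Mat tvec = new Mat();
Mat inliers = new Mat();
boolean ok = Calib3d.solvePnPRansac(
        objectPoints, imagePoints, cameraMatrix, distCoeffs,
        rvec, tvec,
        false,                        // useExtrinsicGuess: do not reuse the current rvec/tvec
        200,                          // iterationsCount
        3.0f,                         // reprojectionError threshold, in pixels
        0.99,                         // confidence
        inliers,                      // receives the indices of the inlier correspondences
        Calib3d.SOLVEPNP_ITERATIVE);  // flags: PnP method applied to the inlier set
System.out.println("pose found: " + ok + ", inliers: " + inliers.rows());
</pre>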
  15618. <a name="solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  15619. <!-- -->
  15620. </a>
  15621. <ul class="blockList">
  15622. <li class="blockList">
  15623. <h4>solvePnPRansac</h4>
  15624. <pre>public static&nbsp;boolean&nbsp;solvePnPRansac(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  15625. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
  15626. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  15627. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  15628. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15629. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  15630. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers)</pre>
  15631. </li>
  15632. </ul>
  15633. <a name="solvePnPRansac-org.opencv.core.MatOfPoint3f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.MatOfDouble-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.calib3d.UsacParams-">
  15634. <!-- -->
  15635. </a>
  15636. <ul class="blockList">
  15637. <li class="blockList">
  15638. <h4>solvePnPRansac</h4>
  15639. <pre>public static&nbsp;boolean&nbsp;solvePnPRansac(<a href="../../../org/opencv/core/MatOfPoint3f.html" title="class in org.opencv.core">MatOfPoint3f</a>&nbsp;objectPoints,
  15640. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;imagePoints,
  15641. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  15642. <a href="../../../org/opencv/core/MatOfDouble.html" title="class in org.opencv.core">MatOfDouble</a>&nbsp;distCoeffs,
  15643. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15644. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  15645. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;inliers,
  15646. <a href="../../../org/opencv/calib3d/UsacParams.html" title="class in org.opencv.calib3d">UsacParams</a>&nbsp;params)</pre>
  15647. </li>
  15648. </ul>
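<div class="block">The USAC-based overload above has no generated description. A hedged sketch, under the
assumption that a default-constructed <code>UsacParams</code> selects the library's default USAC configuration
and that the other inputs are prepared as in the earlier sketches.</div>
<pre>
import org.opencv.calib3d.UsacParams;

Mat rvec = new Mat();
Mat tvec = new Mat();
Mat inliers = new Mat();
UsacParams params = new UsacParams();   // assumed to hold the default USAC settings
boolean ok = Calib3d.solvePnPRansac(objectPoints, imagePoints,
        cameraMatrix, distCoeffs, rvec, tvec, inliers, params);
</pre>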
  15649. <a name="solvePnPRefineLM-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  15650. <!-- -->
  15651. </a>
  15652. <ul class="blockList">
  15653. <li class="blockList">
  15654. <h4>solvePnPRefineLM</h4>
  15655. <pre>public static&nbsp;void&nbsp;solvePnPRefineLM(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  15656. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  15657. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  15658. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  15659. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15660. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec)</pre>
  15661. <div class="block">Refine a pose (the translation and the rotation that transform a 3D point expressed in the object coordinate frame
to the camera coordinate frame) from 3D-2D point correspondences, starting from an initial solution.
  15663. SEE: REF: calib3d_solvePnP</div>
  15664. <dl>
  15665. <dt><span class="paramLabel">Parameters:</span></dt>
  15666. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or 1xN/Nx1 3-channel,
  15667. where N is the number of points. vector&lt;Point3d&gt; can also be passed here.</dd>
  15668. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  15669. where N is the number of points. vector&lt;Point2d&gt; can also be passed here.</dd>
  15670. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  15671. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  15672. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  15673. assumed.</dd>
  15674. <dd><code>rvec</code> - Input/Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  15675. the model coordinate system to the camera coordinate system. Input values are used as an initial solution.</dd>
  15676. <dd><code>tvec</code> - Input/Output translation vector. Input values are used as an initial solution.
  15677. The function refines the object pose given at least 3 object points, their corresponding image
  15678. projections, an initial solution for the rotation and translation vector,
  15679. as well as the camera intrinsic matrix and the distortion coefficients.
  15680. The function minimizes the projection error with respect to the rotation and the translation vectors, according
  15681. to a Levenberg-Marquardt iterative minimization CITE: Madsen04 CITE: Eade13 process.</dd>
  15682. </dl>
  15683. </li>
  15684. </ul>
  15685. <a name="solvePnPRefineLM-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.TermCriteria-">
  15686. <!-- -->
  15687. </a>
  15688. <ul class="blockList">
  15689. <li class="blockList">
  15690. <h4>solvePnPRefineLM</h4>
  15691. <pre>public static&nbsp;void&nbsp;solvePnPRefineLM(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  15692. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  15693. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  15694. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  15695. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15696. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  15697. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</pre>
  15698. <div class="block">Refine a pose (the translation and the rotation that transform a 3D point expressed in the object coordinate frame
to the camera coordinate frame) from 3D-2D point correspondences, starting from an initial solution.
  15700. SEE: REF: calib3d_solvePnP</div>
  15701. <dl>
  15702. <dt><span class="paramLabel">Parameters:</span></dt>
  15703. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or 1xN/Nx1 3-channel,
  15704. where N is the number of points. vector&lt;Point3d&gt; can also be passed here.</dd>
  15705. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  15706. where N is the number of points. vector&lt;Point2d&gt; can also be passed here.</dd>
  15707. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  15708. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  15709. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  15710. assumed.</dd>
  15711. <dd><code>rvec</code> - Input/Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  15712. the model coordinate system to the camera coordinate system. Input values are used as an initial solution.</dd>
  15713. <dd><code>tvec</code> - Input/Output translation vector. Input values are used as an initial solution.</dd>
<dd><code>criteria</code> - Criteria when to stop the Levenberg-Marquardt iterative algorithm.
  15715. The function refines the object pose given at least 3 object points, their corresponding image
  15716. projections, an initial solution for the rotation and translation vector,
  15717. as well as the camera intrinsic matrix and the distortion coefficients.
  15718. The function minimizes the projection error with respect to the rotation and the translation vectors, according
  15719. to a Levenberg-Marquardt iterative minimization CITE: Madsen04 CITE: Eade13 process.</dd>
  15720. </dl>
  15721. </li>
  15722. </ul>
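<div class="block">A sketch of a typical refinement step for the overload above: a coarse pose, for example one
returned by solvePnPRansac, is polished with a few Levenberg-Marquardt iterations. The stopping-criteria values
are illustrative, and the point sets, intrinsics, rvec and tvec are assumed to exist already.</div>
<pre>
import org.opencv.core.TermCriteria;

TermCriteria criteria = new TermCriteria(
        TermCriteria.COUNT + TermCriteria.EPS,   // stop on whichever condition is met first
        20,                                      // at most 20 iterations
        1e-6);                                   // or once the update falls below this epsilon
Calib3d.solvePnPRefineLM(objectPoints, imagePoints, cameraMatrix, distCoeffs,
        rvec, tvec, criteria);                   // rvec and tvec are refined in place
</pre>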
  15723. <a name="solvePnPRefineVVS-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  15724. <!-- -->
  15725. </a>
  15726. <ul class="blockList">
  15727. <li class="blockList">
  15728. <h4>solvePnPRefineVVS</h4>
  15729. <pre>public static&nbsp;void&nbsp;solvePnPRefineVVS(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  15730. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  15731. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  15732. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  15733. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15734. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec)</pre>
  15735. <div class="block">Refine a pose (the translation and the rotation that transform a 3D point expressed in the object coordinate frame
to the camera coordinate frame) from 3D-2D point correspondences, starting from an initial solution.
  15737. SEE: REF: calib3d_solvePnP</div>
  15738. <dl>
  15739. <dt><span class="paramLabel">Parameters:</span></dt>
  15740. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or 1xN/Nx1 3-channel,
  15741. where N is the number of points. vector&lt;Point3d&gt; can also be passed here.</dd>
  15742. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  15743. where N is the number of points. vector&lt;Point2d&gt; can also be passed here.</dd>
  15744. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  15745. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  15746. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  15747. assumed.</dd>
  15748. <dd><code>rvec</code> - Input/Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  15749. the model coordinate system to the camera coordinate system. Input values are used as an initial solution.</dd>
<dd><code>tvec</code> - Input/Output translation vector. Input values are used as an initial solution.
The gain for the virtual visual servoing control law (not exposed by this overload) is equivalent to the \(\alpha\)
gain in the Damped Gauss-Newton formulation.
  15752. The function refines the object pose given at least 3 object points, their corresponding image
  15753. projections, an initial solution for the rotation and translation vector,
  15754. as well as the camera intrinsic matrix and the distortion coefficients.
  15755. The function minimizes the projection error with respect to the rotation and the translation vectors, using a
  15756. virtual visual servoing (VVS) CITE: Chaumette06 CITE: Marchand16 scheme.</dd>
  15757. </dl>
  15758. </li>
  15759. </ul>
  15760. <a name="solvePnPRefineVVS-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.TermCriteria-">
  15761. <!-- -->
  15762. </a>
  15763. <ul class="blockList">
  15764. <li class="blockList">
  15765. <h4>solvePnPRefineVVS</h4>
  15766. <pre>public static&nbsp;void&nbsp;solvePnPRefineVVS(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  15767. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  15768. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  15769. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  15770. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15771. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  15772. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</pre>
  15773. <div class="block">Refine a pose (the translation and the rotation that transform a 3D point expressed in the object coordinate frame
to the camera coordinate frame) from 3D-2D point correspondences, starting from an initial solution.
  15775. SEE: REF: calib3d_solvePnP</div>
  15776. <dl>
  15777. <dt><span class="paramLabel">Parameters:</span></dt>
  15778. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or 1xN/Nx1 3-channel,
  15779. where N is the number of points. vector&lt;Point3d&gt; can also be passed here.</dd>
  15780. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  15781. where N is the number of points. vector&lt;Point2d&gt; can also be passed here.</dd>
  15782. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  15783. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  15784. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  15785. assumed.</dd>
  15786. <dd><code>rvec</code> - Input/Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  15787. the model coordinate system to the camera coordinate system. Input values are used as an initial solution.</dd>
  15788. <dd><code>tvec</code> - Input/Output translation vector. Input values are used as an initial solution.</dd>
<dd><code>criteria</code> - Criteria when to stop the Levenberg-Marquardt iterative algorithm.
The gain for the virtual visual servoing control law (not exposed by this overload) is equivalent to the \(\alpha\)
gain in the Damped Gauss-Newton formulation.
  15791. The function refines the object pose given at least 3 object points, their corresponding image
  15792. projections, an initial solution for the rotation and translation vector,
  15793. as well as the camera intrinsic matrix and the distortion coefficients.
  15794. The function minimizes the projection error with respect to the rotation and the translation vectors, using a
  15795. virtual visual servoing (VVS) CITE: Chaumette06 CITE: Marchand16 scheme.</dd>
  15796. </dl>
  15797. </li>
  15798. </ul>
  15799. <a name="solvePnPRefineVVS-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.TermCriteria-double-">
  15800. <!-- -->
  15801. </a>
  15802. <ul class="blockList">
  15803. <li class="blockList">
  15804. <h4>solvePnPRefineVVS</h4>
  15805. <pre>public static&nbsp;void&nbsp;solvePnPRefineVVS(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;objectPoints,
  15806. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;imagePoints,
  15807. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  15808. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  15809. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;rvec,
  15810. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;tvec,
  15811. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria,
  15812. double&nbsp;VVSlambda)</pre>
  15813. <div class="block">Refine a pose (the translation and the rotation that transform a 3D point expressed in the object coordinate frame
to the camera coordinate frame) from 3D-2D point correspondences, starting from an initial solution.
  15815. SEE: REF: calib3d_solvePnP</div>
  15816. <dl>
  15817. <dt><span class="paramLabel">Parameters:</span></dt>
  15818. <dd><code>objectPoints</code> - Array of object points in the object coordinate space, Nx3 1-channel or 1xN/Nx1 3-channel,
  15819. where N is the number of points. vector&lt;Point3d&gt; can also be passed here.</dd>
  15820. <dd><code>imagePoints</code> - Array of corresponding image points, Nx2 1-channel or 1xN/Nx1 2-channel,
  15821. where N is the number of points. vector&lt;Point2d&gt; can also be passed here.</dd>
  15822. <dd><code>cameraMatrix</code> - Input camera intrinsic matrix \(\cameramatrix{A}\) .</dd>
  15823. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  15824. \(\distcoeffs\). If the vector is NULL/empty, the zero distortion coefficients are
  15825. assumed.</dd>
  15826. <dd><code>rvec</code> - Input/Output rotation vector (see REF: Rodrigues ) that, together with tvec, brings points from
  15827. the model coordinate system to the camera coordinate system. Input values are used as an initial solution.</dd>
  15828. <dd><code>tvec</code> - Input/Output translation vector. Input values are used as an initial solution.</dd>
<dd><code>criteria</code> - Criteria when to stop the Levenberg-Marquardt iterative algorithm.</dd>
  15830. <dd><code>VVSlambda</code> - Gain for the virtual visual servoing control law, equivalent to the \(\alpha\)
  15831. gain in the Damped Gauss-Newton formulation.
  15832. The function refines the object pose given at least 3 object points, their corresponding image
  15833. projections, an initial solution for the rotation and translation vector,
  15834. as well as the camera intrinsic matrix and the distortion coefficients.
  15835. The function minimizes the projection error with respect to the rotation and the translation vectors, using a
  15836. virtual visual servoing (VVS) CITE: Chaumette06 CITE: Marchand16 scheme.</dd>
  15837. </dl>
  15838. </li>
  15839. </ul>
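<div class="block">A sketch of the virtual visual servoing refinement above. The termination criteria and the
gain are illustrative values rather than recommended defaults, and the inputs are assumed to be the same Mats
used with the Levenberg-Marquardt sketch.</div>
<pre>
TermCriteria criteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 20, 1e-6);
double vvsLambda = 1.0;   // gain of the control law, the alpha of the damped Gauss-Newton step
Calib3d.solvePnPRefineVVS(objectPoints, imagePoints, cameraMatrix, distCoeffs,
        rvec, tvec, criteria, vvsLambda);   // refines rvec and tvec in place
</pre>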
  15840. <a name="stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  15841. <!-- -->
  15842. </a>
  15843. <ul class="blockList">
  15844. <li class="blockList">
  15845. <h4>stereoCalibrate</h4>
  15846. <pre>public static&nbsp;double&nbsp;stereoCalibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  15847. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  15848. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  15849. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  15850. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  15851. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  15852. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  15853. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  15854. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  15855. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  15856. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  15857. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F)</pre>
  15858. </li>
  15859. </ul>
  15860. <a name="stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">
  15861. <!-- -->
  15862. </a>
  15863. <ul class="blockList">
  15864. <li class="blockList">
  15865. <h4>stereoCalibrate</h4>
  15866. <pre>public static&nbsp;double&nbsp;stereoCalibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  15867. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  15868. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  15869. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  15870. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  15871. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  15872. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  15873. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  15874. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  15875. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  15876. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  15877. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  15878. int&nbsp;flags)</pre>
  15879. </li>
  15880. </ul>
  15881. <a name="stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.TermCriteria-">
  15882. <!-- -->
  15883. </a>
  15884. <ul class="blockList">
  15885. <li class="blockList">
  15886. <h4>stereoCalibrate</h4>
  15887. <pre>public static&nbsp;double&nbsp;stereoCalibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  15888. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  15889. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  15890. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  15891. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  15892. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  15893. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  15894. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  15895. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  15896. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  15897. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  15898. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  15899. int&nbsp;flags,
  15900. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</pre>
  15901. </li>
  15902. </ul>
  15903. <a name="stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  15904. <!-- -->
  15905. </a>
  15906. <ul class="blockList">
  15907. <li class="blockList">
  15908. <h4>stereoCalibrate</h4>
  15909. <pre>public static&nbsp;double&nbsp;stereoCalibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  15910. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  15911. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  15912. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  15913. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  15914. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  15915. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  15916. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  15917. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  15918. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  15919. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  15920. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  15921. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors)</pre>
  15922. </li>
  15923. </ul>
  15924. <a name="stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">
  15925. <!-- -->
  15926. </a>
  15927. <ul class="blockList">
  15928. <li class="blockList">
  15929. <h4>stereoCalibrate</h4>
  15930. <pre>public static&nbsp;double&nbsp;stereoCalibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  15931. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  15932. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  15933. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  15934. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  15935. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  15936. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  15937. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  15938. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  15939. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  15940. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  15941. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  15942. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
  15943. int&nbsp;flags)</pre>
  15944. </li>
  15945. </ul>
  15946. <a name="stereoCalibrate-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-org.opencv.core.TermCriteria-">
  15947. <!-- -->
  15948. </a>
  15949. <ul class="blockList">
  15950. <li class="blockList">
  15951. <h4>stereoCalibrate</h4>
  15952. <pre>public static&nbsp;double&nbsp;stereoCalibrate(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  15953. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  15954. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  15955. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  15956. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  15957. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  15958. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  15959. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  15960. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  15961. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  15962. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  15963. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  15964. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
  15965. int&nbsp;flags,
  15966. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</pre>
  15967. </li>
  15968. </ul>
  15969. <a name="stereoCalibrateExtended-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-">
  15970. <!-- -->
  15971. </a>
  15972. <ul class="blockList">
  15973. <li class="blockList">
  15974. <h4>stereoCalibrateExtended</h4>
  15975. <pre>public static&nbsp;double&nbsp;stereoCalibrateExtended(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  15976. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  15977. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  15978. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  15979. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  15980. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  15981. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  15982. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  15983. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  15984. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  15985. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  15986. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  15987. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  15988. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  15989. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors)</pre>
<div class="block">Calibrates a stereo camera setup. This function finds the intrinsic parameters
for each of the two cameras and the extrinsic parameters between the two cameras.</div>
  15992. <dl>
  15993. <dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>objectPoints</code> - Vector of vectors of the calibration pattern points. The same structure as
in REF: calibrateCamera. For each pattern view, both cameras need to see the same object
points. Therefore, objectPoints.size(), imagePoints1.size(), and imagePoints2.size() need to be
equal, and objectPoints[i].size(), imagePoints1[i].size(), and imagePoints2[i].size() need to
be equal for each i.</dd>
  15999. <dd><code>imagePoints1</code> - Vector of vectors of the projections of the calibration pattern points,
  16000. observed by the first camera. The same structure as in REF: calibrateCamera.</dd>
  16001. <dd><code>imagePoints2</code> - Vector of vectors of the projections of the calibration pattern points,
  16002. observed by the second camera. The same structure as in REF: calibrateCamera.</dd>
  16003. <dd><code>cameraMatrix1</code> - Input/output camera intrinsic matrix for the first camera, the same as in
  16004. REF: calibrateCamera. Furthermore, for the stereo case, additional flags may be used, see below.</dd>
  16005. <dd><code>distCoeffs1</code> - Input/output vector of distortion coefficients, the same as in
  16006. REF: calibrateCamera.</dd>
<dd><code>cameraMatrix2</code> - Input/output camera intrinsic matrix for the second camera. See the description of
cameraMatrix1.</dd>
  16009. <dd><code>distCoeffs2</code> - Input/output lens distortion coefficients for the second camera. See
  16010. description for distCoeffs1.</dd>
  16011. <dd><code>imageSize</code> - Size of the image used only to initialize the camera intrinsic matrices.</dd>
  16012. <dd><code>R</code> - Output rotation matrix. Together with the translation vector T, this matrix brings
  16013. points given in the first camera's coordinate system to points in the second camera's
  16014. coordinate system. In more technical terms, the tuple of R and T performs a change of basis
  16015. from the first camera's coordinate system to the second camera's coordinate system. Due to its
  16016. duality, this tuple is equivalent to the position of the first camera with respect to the
  16017. second camera coordinate system.</dd>
  16018. <dd><code>T</code> - Output translation vector, see description above.</dd>
  16019. <dd><code>E</code> - Output essential matrix.</dd>
  16020. <dd><code>F</code> - Output fundamental matrix.</dd>
  16021. <dd><code>rvecs</code> - Output vector of rotation vectors ( REF: Rodrigues ) estimated for each pattern view in the
  16022. coordinate system of the first camera of the stereo pair (e.g. std::vector&lt;cv::Mat&gt;). More in detail, each
  16023. i-th rotation vector together with the corresponding i-th translation vector (see the next output parameter
  16024. description) brings the calibration pattern from the object coordinate space (in which object points are
  16025. specified) to the camera coordinate space of the first camera of the stereo pair. In more technical terms,
  16026. the tuple of the i-th rotation and translation vector performs a change of basis from object coordinate space
  16027. to camera coordinate space of the first camera of the stereo pair.</dd>
  16028. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view, see parameter description
  16029. of previous output parameter ( rvecs ).</dd>
<dd><code>perViewErrors</code> - Output vector of the RMS re-projection error estimated for each pattern view.
The calibration flags (see the flags parameter of the overloads below) may be zero or a combination of the following values:
<ul>
  16032. <li>
  16033. REF: CALIB_FIX_INTRINSIC Fix cameraMatrix? and distCoeffs? so that only R, T, E, and F
  16034. matrices are estimated.
  16035. </li>
  16036. <li>
  16037. REF: CALIB_USE_INTRINSIC_GUESS Optimize some or all of the intrinsic parameters
  16038. according to the specified flags. Initial values are provided by the user.
  16039. </li>
  16040. <li>
  16041. REF: CALIB_USE_EXTRINSIC_GUESS R and T contain valid initial values that are optimized further.
  16042. Otherwise R and T are initialized to the median value of the pattern views (each dimension separately).
  16043. </li>
  16044. <li>
  16045. REF: CALIB_FIX_PRINCIPAL_POINT Fix the principal points during the optimization.
  16046. </li>
  16047. <li>
  16048. REF: CALIB_FIX_FOCAL_LENGTH Fix \(f^{(j)}_x\) and \(f^{(j)}_y\) .
  16049. </li>
  16050. <li>
REF: CALIB_FIX_ASPECT_RATIO Optimize \(f^{(j)}_y\). Fix the ratio \(f^{(j)}_x/f^{(j)}_y\).
  16053. </li>
  16054. <li>
  16055. REF: CALIB_SAME_FOCAL_LENGTH Enforce \(f^{(0)}_x=f^{(1)}_x\) and \(f^{(0)}_y=f^{(1)}_y\) .
  16056. </li>
  16057. <li>
REF: CALIB_ZERO_TANGENT_DIST Set the tangential distortion coefficients for each camera to
zero and fix them there.
  16060. </li>
  16061. <li>
  16062. REF: CALIB_FIX_K1,..., REF: CALIB_FIX_K6 Do not change the corresponding radial
  16063. distortion coefficient during the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set,
  16064. the coefficient from the supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  16065. </li>
  16066. <li>
  16067. REF: CALIB_RATIONAL_MODEL Enable coefficients k4, k5, and k6. To provide the backward
  16068. compatibility, this extra flag should be explicitly specified to make the calibration
  16069. function use the rational model and return 8 coefficients. If the flag is not set, the
  16070. function computes and returns only 5 distortion coefficients.
  16071. </li>
  16072. <li>
  16073. REF: CALIB_THIN_PRISM_MODEL Coefficients s1, s2, s3 and s4 are enabled. To provide the
  16074. backward compatibility, this extra flag should be explicitly specified to make the
  16075. calibration function use the thin prism model and return 12 coefficients. If the flag is not
  16076. set, the function computes and returns only 5 distortion coefficients.
  16077. </li>
  16078. <li>
  16079. REF: CALIB_FIX_S1_S2_S3_S4 The thin prism distortion coefficients are not changed during
  16080. the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
  16081. supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  16082. </li>
  16083. <li>
  16084. REF: CALIB_TILTED_MODEL Coefficients tauX and tauY are enabled. To provide the
  16085. backward compatibility, this extra flag should be explicitly specified to make the
  16086. calibration function use the tilted sensor model and return 14 coefficients. If the flag is not
  16087. set, the function computes and returns only 5 distortion coefficients.
  16088. </li>
  16089. <li>
  16090. REF: CALIB_FIX_TAUX_TAUY The coefficients of the tilted sensor model are not changed during
  16091. the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
  16092. supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  16093. </li>
  16094. </ul>
  16095. The function estimates the transformation between two cameras making a stereo pair. If one computes
  16096. the poses of an object relative to the first camera and to the second camera,
  16097. ( \(R_1\),\(T_1\) ) and (\(R_2\),\(T_2\)), respectively, for a stereo camera where the
  16098. relative position and orientation between the two cameras are fixed, then those poses definitely
  16099. relate to each other. This means, if the relative position and orientation (\(R\),\(T\)) of the
  16100. two cameras is known, it is possible to compute (\(R_2\),\(T_2\)) when (\(R_1\),\(T_1\)) is
  16101. given. This is what the described function does. It computes (\(R\),\(T\)) such that:
  16102. \(R_2=R R_1\)
  16103. \(T_2=R T_1 + T.\)
  16104. Therefore, one can compute the coordinate representation of a 3D point for the second camera's
  16105. coordinate system when given the point's coordinate representation in the first camera's coordinate
  16106. system:
  16107. \(\begin{bmatrix}
  16108. X_2 \\
  16109. Y_2 \\
  16110. Z_2 \\
  16111. 1
  16112. \end{bmatrix} = \begin{bmatrix}
  16113. R &amp; T \\
  16114. 0 &amp; 1
  16115. \end{bmatrix} \begin{bmatrix}
  16116. X_1 \\
  16117. Y_1 \\
  16118. Z_1 \\
  16119. 1
  16120. \end{bmatrix}.\)
  16121. Optionally, it computes the essential matrix E:
\(E= \begin{bmatrix} 0 &amp; -T_2 &amp; T_1 \\ T_2 &amp; 0 &amp; -T_0 \\ -T_1 &amp; T_0 &amp; 0 \end{bmatrix} R\)
  16123. where \(T_i\) are components of the translation vector \(T\) : \(T=[T_0, T_1, T_2]^T\) .
  16124. And the function can also compute the fundamental matrix F:
  16125. \(F = cameraMatrix2^{-T}\cdot E \cdot cameraMatrix1^{-1}\)
  16126. Besides the stereo-related information, the function can also perform a full calibration of each of
  16127. the two cameras. However, due to the high dimensionality of the parameter space and noise in the
  16128. input data, the function can diverge from the correct solution. If the intrinsic parameters can be
estimated with high accuracy for each of the cameras individually (for example, using
#calibrateCamera ), it is recommended to do so and then pass the REF: CALIB_FIX_INTRINSIC flag to the
  16131. function along with the computed intrinsic parameters. Otherwise, if all the parameters are
  16132. estimated at once, it makes sense to restrict some parameters, for example, pass
  16133. REF: CALIB_SAME_FOCAL_LENGTH and REF: CALIB_ZERO_TANGENT_DIST flags, which is usually a
  16134. reasonable assumption.
  16135. Similarly to #calibrateCamera, the function minimizes the total re-projection error for all the
  16136. points in all the available views from both cameras. The function returns the final value of the
  16137. re-projection error.</dd>
  16138. <dt><span class="returnLabel">Returns:</span></dt>
  16139. <dd>automatically generated</dd>
  16140. </dl>
  16141. </li>
  16142. </ul>
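<div class="block">A minimal Java sketch of how this overload might be called, assuming the per-view pattern
corners have already been detected and stored as Mat lists (typically one MatOfPoint3f per view for
objectPoints and one MatOfPoint2f per view for the image points), and that cameraMatrix1/distCoeffs1 and
cameraMatrix2/distCoeffs2 already hold per-camera calibration results, as recommended above. Class and
method names are illustrative only.</div>
<pre>import java.util.ArrayList;
import java.util.List;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;
import org.opencv.core.Size;

public class StereoCalibrationSketch {
    /** Illustrative only: all inputs are assumed to be prepared by the caller. */
    public static double run(List&lt;Mat&gt; objectPoints,  // one MatOfPoint3f per view
                             List&lt;Mat&gt; imagePoints1,  // one MatOfPoint2f per view, first camera
                             List&lt;Mat&gt; imagePoints2,  // one MatOfPoint2f per view, second camera
                             Mat cameraMatrix1, Mat distCoeffs1,
                             Mat cameraMatrix2, Mat distCoeffs2,
                             Size imageSize) {
        // Output containers; they are filled by the call.
        Mat R = new Mat(), T = new Mat(), E = new Mat(), F = new Mat();
        List&lt;Mat&gt; rvecs = new ArrayList&lt;&gt;();
        List&lt;Mat&gt; tvecs = new ArrayList&lt;&gt;();
        Mat perViewErrors = new Mat();

        double rms = Calib3d.stereoCalibrateExtended(objectPoints, imagePoints1, imagePoints2,
                cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2, imageSize,
                R, T, E, F, rvecs, tvecs, perViewErrors);

        System.out.println("Overall RMS re-projection error: " + rms);
        System.out.println("Per-view RMS errors: " + perViewErrors.dump());
        return rms;
    }
}</pre>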
  16143. <a name="stereoCalibrateExtended-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-int-">
  16144. <!-- -->
  16145. </a>
  16146. <ul class="blockList">
  16147. <li class="blockList">
  16148. <h4>stereoCalibrateExtended</h4>
  16149. <pre>public static&nbsp;double&nbsp;stereoCalibrateExtended(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  16150. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  16151. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  16152. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  16153. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  16154. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  16155. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  16156. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  16157. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  16158. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  16159. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  16160. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  16161. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  16162. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  16163. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
  16164. int&nbsp;flags)</pre>
<div class="block">Calibrates a stereo camera setup. This function finds the intrinsic parameters
for each of the two cameras and the extrinsic parameters between the two cameras.</div>
  16167. <dl>
  16168. <dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>objectPoints</code> - Vector of vectors of the calibration pattern points. The same structure as
in REF: calibrateCamera. For each pattern view, both cameras need to see the same object
points. Therefore, objectPoints.size(), imagePoints1.size(), and imagePoints2.size() need to be
equal, and objectPoints[i].size(), imagePoints1[i].size(), and imagePoints2[i].size() need to
be equal for each i.</dd>
  16174. <dd><code>imagePoints1</code> - Vector of vectors of the projections of the calibration pattern points,
  16175. observed by the first camera. The same structure as in REF: calibrateCamera.</dd>
  16176. <dd><code>imagePoints2</code> - Vector of vectors of the projections of the calibration pattern points,
  16177. observed by the second camera. The same structure as in REF: calibrateCamera.</dd>
  16178. <dd><code>cameraMatrix1</code> - Input/output camera intrinsic matrix for the first camera, the same as in
  16179. REF: calibrateCamera. Furthermore, for the stereo case, additional flags may be used, see below.</dd>
  16180. <dd><code>distCoeffs1</code> - Input/output vector of distortion coefficients, the same as in
  16181. REF: calibrateCamera.</dd>
<dd><code>cameraMatrix2</code> - Input/output camera intrinsic matrix for the second camera. See the description of
cameraMatrix1.</dd>
  16184. <dd><code>distCoeffs2</code> - Input/output lens distortion coefficients for the second camera. See
  16185. description for distCoeffs1.</dd>
  16186. <dd><code>imageSize</code> - Size of the image used only to initialize the camera intrinsic matrices.</dd>
  16187. <dd><code>R</code> - Output rotation matrix. Together with the translation vector T, this matrix brings
  16188. points given in the first camera's coordinate system to points in the second camera's
  16189. coordinate system. In more technical terms, the tuple of R and T performs a change of basis
  16190. from the first camera's coordinate system to the second camera's coordinate system. Due to its
  16191. duality, this tuple is equivalent to the position of the first camera with respect to the
  16192. second camera coordinate system.</dd>
  16193. <dd><code>T</code> - Output translation vector, see description above.</dd>
  16194. <dd><code>E</code> - Output essential matrix.</dd>
  16195. <dd><code>F</code> - Output fundamental matrix.</dd>
  16196. <dd><code>rvecs</code> - Output vector of rotation vectors ( REF: Rodrigues ) estimated for each pattern view in the
  16197. coordinate system of the first camera of the stereo pair (e.g. std::vector&lt;cv::Mat&gt;). More in detail, each
  16198. i-th rotation vector together with the corresponding i-th translation vector (see the next output parameter
  16199. description) brings the calibration pattern from the object coordinate space (in which object points are
  16200. specified) to the camera coordinate space of the first camera of the stereo pair. In more technical terms,
  16201. the tuple of the i-th rotation and translation vector performs a change of basis from object coordinate space
  16202. to camera coordinate space of the first camera of the stereo pair.</dd>
  16203. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view, see parameter description
  16204. of previous output parameter ( rvecs ).</dd>
  16205. <dd><code>perViewErrors</code> - Output vector of the RMS re-projection error estimated for each pattern view.</dd>
  16206. <dd><code>flags</code> - Different flags that may be zero or a combination of the following values:
  16207. <ul>
  16208. <li>
  16209. REF: CALIB_FIX_INTRINSIC Fix cameraMatrix? and distCoeffs? so that only R, T, E, and F
  16210. matrices are estimated.
  16211. </li>
  16212. <li>
  16213. REF: CALIB_USE_INTRINSIC_GUESS Optimize some or all of the intrinsic parameters
  16214. according to the specified flags. Initial values are provided by the user.
  16215. </li>
  16216. <li>
  16217. REF: CALIB_USE_EXTRINSIC_GUESS R and T contain valid initial values that are optimized further.
  16218. Otherwise R and T are initialized to the median value of the pattern views (each dimension separately).
  16219. </li>
  16220. <li>
  16221. REF: CALIB_FIX_PRINCIPAL_POINT Fix the principal points during the optimization.
  16222. </li>
  16223. <li>
  16224. REF: CALIB_FIX_FOCAL_LENGTH Fix \(f^{(j)}_x\) and \(f^{(j)}_y\) .
  16225. </li>
  16226. <li>
REF: CALIB_FIX_ASPECT_RATIO Optimize \(f^{(j)}_y\). Fix the ratio \(f^{(j)}_x/f^{(j)}_y\).
  16229. </li>
  16230. <li>
  16231. REF: CALIB_SAME_FOCAL_LENGTH Enforce \(f^{(0)}_x=f^{(1)}_x\) and \(f^{(0)}_y=f^{(1)}_y\) .
  16232. </li>
  16233. <li>
REF: CALIB_ZERO_TANGENT_DIST Set the tangential distortion coefficients for each camera to
zero and fix them there.
  16236. </li>
  16237. <li>
  16238. REF: CALIB_FIX_K1,..., REF: CALIB_FIX_K6 Do not change the corresponding radial
  16239. distortion coefficient during the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set,
  16240. the coefficient from the supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  16241. </li>
  16242. <li>
  16243. REF: CALIB_RATIONAL_MODEL Enable coefficients k4, k5, and k6. To provide the backward
  16244. compatibility, this extra flag should be explicitly specified to make the calibration
  16245. function use the rational model and return 8 coefficients. If the flag is not set, the
  16246. function computes and returns only 5 distortion coefficients.
  16247. </li>
  16248. <li>
  16249. REF: CALIB_THIN_PRISM_MODEL Coefficients s1, s2, s3 and s4 are enabled. To provide the
  16250. backward compatibility, this extra flag should be explicitly specified to make the
  16251. calibration function use the thin prism model and return 12 coefficients. If the flag is not
  16252. set, the function computes and returns only 5 distortion coefficients.
  16253. </li>
  16254. <li>
  16255. REF: CALIB_FIX_S1_S2_S3_S4 The thin prism distortion coefficients are not changed during
  16256. the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
  16257. supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  16258. </li>
  16259. <li>
  16260. REF: CALIB_TILTED_MODEL Coefficients tauX and tauY are enabled. To provide the
  16261. backward compatibility, this extra flag should be explicitly specified to make the
  16262. calibration function use the tilted sensor model and return 14 coefficients. If the flag is not
  16263. set, the function computes and returns only 5 distortion coefficients.
  16264. </li>
  16265. <li>
  16266. REF: CALIB_FIX_TAUX_TAUY The coefficients of the tilted sensor model are not changed during
  16267. the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
  16268. supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  16269. </li>
  16270. </ul>
  16271. The function estimates the transformation between two cameras making a stereo pair. If one computes
  16272. the poses of an object relative to the first camera and to the second camera,
  16273. ( \(R_1\),\(T_1\) ) and (\(R_2\),\(T_2\)), respectively, for a stereo camera where the
  16274. relative position and orientation between the two cameras are fixed, then those poses definitely
  16275. relate to each other. This means, if the relative position and orientation (\(R\),\(T\)) of the
  16276. two cameras is known, it is possible to compute (\(R_2\),\(T_2\)) when (\(R_1\),\(T_1\)) is
  16277. given. This is what the described function does. It computes (\(R\),\(T\)) such that:
  16278. \(R_2=R R_1\)
  16279. \(T_2=R T_1 + T.\)
  16280. Therefore, one can compute the coordinate representation of a 3D point for the second camera's
  16281. coordinate system when given the point's coordinate representation in the first camera's coordinate
  16282. system:
  16283. \(\begin{bmatrix}
  16284. X_2 \\
  16285. Y_2 \\
  16286. Z_2 \\
  16287. 1
  16288. \end{bmatrix} = \begin{bmatrix}
  16289. R &amp; T \\
  16290. 0 &amp; 1
  16291. \end{bmatrix} \begin{bmatrix}
  16292. X_1 \\
  16293. Y_1 \\
  16294. Z_1 \\
  16295. 1
  16296. \end{bmatrix}.\)
  16297. Optionally, it computes the essential matrix E:
\(E= \begin{bmatrix} 0 &amp; -T_2 &amp; T_1 \\ T_2 &amp; 0 &amp; -T_0 \\ -T_1 &amp; T_0 &amp; 0 \end{bmatrix} R\)
  16299. where \(T_i\) are components of the translation vector \(T\) : \(T=[T_0, T_1, T_2]^T\) .
  16300. And the function can also compute the fundamental matrix F:
  16301. \(F = cameraMatrix2^{-T}\cdot E \cdot cameraMatrix1^{-1}\)
  16302. Besides the stereo-related information, the function can also perform a full calibration of each of
  16303. the two cameras. However, due to the high dimensionality of the parameter space and noise in the
  16304. input data, the function can diverge from the correct solution. If the intrinsic parameters can be
estimated with high accuracy for each of the cameras individually (for example, using
#calibrateCamera ), it is recommended to do so and then pass the REF: CALIB_FIX_INTRINSIC flag to the
  16307. function along with the computed intrinsic parameters. Otherwise, if all the parameters are
  16308. estimated at once, it makes sense to restrict some parameters, for example, pass
  16309. REF: CALIB_SAME_FOCAL_LENGTH and REF: CALIB_ZERO_TANGENT_DIST flags, which is usually a
  16310. reasonable assumption.
  16311. Similarly to #calibrateCamera, the function minimizes the total re-projection error for all the
  16312. points in all the available views from both cameras. The function returns the final value of the
  16313. re-projection error.</dd>
  16314. <dt><span class="returnLabel">Returns:</span></dt>
  16315. <dd>automatically generated</dd>
  16316. </dl>
  16317. </li>
  16318. </ul>
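<div class="block">When all parameters are estimated at once, the description above suggests restricting some
of them. A hedged sketch of composing such flags for this overload, reusing the variables from the previous
sketch; CALIB_USE_INTRINSIC_GUESS assumes cameraMatrix1/2 already hold reasonable initial values.</div>
<pre>// Variables carried over from the previous sketch.
// Restrict the model, as suggested above, when the intrinsics are refined jointly.
int flags = Calib3d.CALIB_USE_INTRINSIC_GUESS
          | Calib3d.CALIB_SAME_FOCAL_LENGTH
          | Calib3d.CALIB_ZERO_TANGENT_DIST;

double rms = Calib3d.stereoCalibrateExtended(objectPoints, imagePoints1, imagePoints2,
        cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2, imageSize,
        R, T, E, F, rvecs, tvecs, perViewErrors, flags);</pre>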
  16319. <a name="stereoCalibrateExtended-java.util.List-java.util.List-java.util.List-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-java.util.List-java.util.List-org.opencv.core.Mat-int-org.opencv.core.TermCriteria-">
  16320. <!-- -->
  16321. </a>
  16322. <ul class="blockList">
  16323. <li class="blockList">
  16324. <h4>stereoCalibrateExtended</h4>
  16325. <pre>public static&nbsp;double&nbsp;stereoCalibrateExtended(java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;objectPoints,
  16326. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints1,
  16327. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;imagePoints2,
  16328. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  16329. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  16330. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  16331. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  16332. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  16333. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  16334. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  16335. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;E,
  16336. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  16337. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;rvecs,
  16338. java.util.List&lt;<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&gt;&nbsp;tvecs,
  16339. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;perViewErrors,
  16340. int&nbsp;flags,
  16341. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</pre>
<div class="block">Calibrates a stereo camera setup. This function finds the intrinsic parameters
for each of the two cameras and the extrinsic parameters between the two cameras.</div>
  16344. <dl>
  16345. <dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>objectPoints</code> - Vector of vectors of the calibration pattern points. The same structure as
in REF: calibrateCamera. For each pattern view, both cameras need to see the same object
points. Therefore, objectPoints.size(), imagePoints1.size(), and imagePoints2.size() need to be
equal, and objectPoints[i].size(), imagePoints1[i].size(), and imagePoints2[i].size() need to
be equal for each i.</dd>
  16351. <dd><code>imagePoints1</code> - Vector of vectors of the projections of the calibration pattern points,
  16352. observed by the first camera. The same structure as in REF: calibrateCamera.</dd>
  16353. <dd><code>imagePoints2</code> - Vector of vectors of the projections of the calibration pattern points,
  16354. observed by the second camera. The same structure as in REF: calibrateCamera.</dd>
  16355. <dd><code>cameraMatrix1</code> - Input/output camera intrinsic matrix for the first camera, the same as in
  16356. REF: calibrateCamera. Furthermore, for the stereo case, additional flags may be used, see below.</dd>
  16357. <dd><code>distCoeffs1</code> - Input/output vector of distortion coefficients, the same as in
  16358. REF: calibrateCamera.</dd>
<dd><code>cameraMatrix2</code> - Input/output camera intrinsic matrix for the second camera. See the description of
cameraMatrix1.</dd>
  16361. <dd><code>distCoeffs2</code> - Input/output lens distortion coefficients for the second camera. See
  16362. description for distCoeffs1.</dd>
  16363. <dd><code>imageSize</code> - Size of the image used only to initialize the camera intrinsic matrices.</dd>
  16364. <dd><code>R</code> - Output rotation matrix. Together with the translation vector T, this matrix brings
  16365. points given in the first camera's coordinate system to points in the second camera's
  16366. coordinate system. In more technical terms, the tuple of R and T performs a change of basis
  16367. from the first camera's coordinate system to the second camera's coordinate system. Due to its
  16368. duality, this tuple is equivalent to the position of the first camera with respect to the
  16369. second camera coordinate system.</dd>
  16370. <dd><code>T</code> - Output translation vector, see description above.</dd>
  16371. <dd><code>E</code> - Output essential matrix.</dd>
  16372. <dd><code>F</code> - Output fundamental matrix.</dd>
  16373. <dd><code>rvecs</code> - Output vector of rotation vectors ( REF: Rodrigues ) estimated for each pattern view in the
  16374. coordinate system of the first camera of the stereo pair (e.g. std::vector&lt;cv::Mat&gt;). More in detail, each
  16375. i-th rotation vector together with the corresponding i-th translation vector (see the next output parameter
  16376. description) brings the calibration pattern from the object coordinate space (in which object points are
  16377. specified) to the camera coordinate space of the first camera of the stereo pair. In more technical terms,
  16378. the tuple of the i-th rotation and translation vector performs a change of basis from object coordinate space
  16379. to camera coordinate space of the first camera of the stereo pair.</dd>
  16380. <dd><code>tvecs</code> - Output vector of translation vectors estimated for each pattern view, see parameter description
  16381. of previous output parameter ( rvecs ).</dd>
  16382. <dd><code>perViewErrors</code> - Output vector of the RMS re-projection error estimated for each pattern view.</dd>
  16383. <dd><code>flags</code> - Different flags that may be zero or a combination of the following values:
  16384. <ul>
  16385. <li>
  16386. REF: CALIB_FIX_INTRINSIC Fix cameraMatrix? and distCoeffs? so that only R, T, E, and F
  16387. matrices are estimated.
  16388. </li>
  16389. <li>
  16390. REF: CALIB_USE_INTRINSIC_GUESS Optimize some or all of the intrinsic parameters
  16391. according to the specified flags. Initial values are provided by the user.
  16392. </li>
  16393. <li>
  16394. REF: CALIB_USE_EXTRINSIC_GUESS R and T contain valid initial values that are optimized further.
  16395. Otherwise R and T are initialized to the median value of the pattern views (each dimension separately).
  16396. </li>
  16397. <li>
  16398. REF: CALIB_FIX_PRINCIPAL_POINT Fix the principal points during the optimization.
  16399. </li>
  16400. <li>
  16401. REF: CALIB_FIX_FOCAL_LENGTH Fix \(f^{(j)}_x\) and \(f^{(j)}_y\) .
  16402. </li>
  16403. <li>
REF: CALIB_FIX_ASPECT_RATIO Optimize \(f^{(j)}_y\). Fix the ratio \(f^{(j)}_x/f^{(j)}_y\).
  16406. </li>
  16407. <li>
  16408. REF: CALIB_SAME_FOCAL_LENGTH Enforce \(f^{(0)}_x=f^{(1)}_x\) and \(f^{(0)}_y=f^{(1)}_y\) .
  16409. </li>
  16410. <li>
REF: CALIB_ZERO_TANGENT_DIST Set the tangential distortion coefficients for each camera to
zero and fix them there.
  16413. </li>
  16414. <li>
  16415. REF: CALIB_FIX_K1,..., REF: CALIB_FIX_K6 Do not change the corresponding radial
  16416. distortion coefficient during the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set,
  16417. the coefficient from the supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  16418. </li>
  16419. <li>
  16420. REF: CALIB_RATIONAL_MODEL Enable coefficients k4, k5, and k6. To provide the backward
  16421. compatibility, this extra flag should be explicitly specified to make the calibration
  16422. function use the rational model and return 8 coefficients. If the flag is not set, the
  16423. function computes and returns only 5 distortion coefficients.
  16424. </li>
  16425. <li>
  16426. REF: CALIB_THIN_PRISM_MODEL Coefficients s1, s2, s3 and s4 are enabled. To provide the
  16427. backward compatibility, this extra flag should be explicitly specified to make the
  16428. calibration function use the thin prism model and return 12 coefficients. If the flag is not
  16429. set, the function computes and returns only 5 distortion coefficients.
  16430. </li>
  16431. <li>
  16432. REF: CALIB_FIX_S1_S2_S3_S4 The thin prism distortion coefficients are not changed during
  16433. the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
  16434. supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  16435. </li>
  16436. <li>
  16437. REF: CALIB_TILTED_MODEL Coefficients tauX and tauY are enabled. To provide the
  16438. backward compatibility, this extra flag should be explicitly specified to make the
  16439. calibration function use the tilted sensor model and return 14 coefficients. If the flag is not
  16440. set, the function computes and returns only 5 distortion coefficients.
  16441. </li>
  16442. <li>
  16443. REF: CALIB_FIX_TAUX_TAUY The coefficients of the tilted sensor model are not changed during
  16444. the optimization. If REF: CALIB_USE_INTRINSIC_GUESS is set, the coefficient from the
  16445. supplied distCoeffs matrix is used. Otherwise, it is set to 0.
  16446. </li>
  16447. </ul></dd>
  16448. <dd><code>criteria</code> - Termination criteria for the iterative optimization algorithm.
  16449. The function estimates the transformation between two cameras making a stereo pair. If one computes
  16450. the poses of an object relative to the first camera and to the second camera,
  16451. ( \(R_1\),\(T_1\) ) and (\(R_2\),\(T_2\)), respectively, for a stereo camera where the
  16452. relative position and orientation between the two cameras are fixed, then those poses definitely
  16453. relate to each other. This means, if the relative position and orientation (\(R\),\(T\)) of the
  16454. two cameras is known, it is possible to compute (\(R_2\),\(T_2\)) when (\(R_1\),\(T_1\)) is
  16455. given. This is what the described function does. It computes (\(R\),\(T\)) such that:
  16456. \(R_2=R R_1\)
  16457. \(T_2=R T_1 + T.\)
  16458. Therefore, one can compute the coordinate representation of a 3D point for the second camera's
  16459. coordinate system when given the point's coordinate representation in the first camera's coordinate
  16460. system:
  16461. \(\begin{bmatrix}
  16462. X_2 \\
  16463. Y_2 \\
  16464. Z_2 \\
  16465. 1
  16466. \end{bmatrix} = \begin{bmatrix}
  16467. R &amp; T \\
  16468. 0 &amp; 1
  16469. \end{bmatrix} \begin{bmatrix}
  16470. X_1 \\
  16471. Y_1 \\
  16472. Z_1 \\
  16473. 1
  16474. \end{bmatrix}.\)
  16475. Optionally, it computes the essential matrix E:
\(E= \begin{bmatrix} 0 &amp; -T_2 &amp; T_1 \\ T_2 &amp; 0 &amp; -T_0 \\ -T_1 &amp; T_0 &amp; 0 \end{bmatrix} R\)
  16477. where \(T_i\) are components of the translation vector \(T\) : \(T=[T_0, T_1, T_2]^T\) .
  16478. And the function can also compute the fundamental matrix F:
  16479. \(F = cameraMatrix2^{-T}\cdot E \cdot cameraMatrix1^{-1}\)
  16480. Besides the stereo-related information, the function can also perform a full calibration of each of
  16481. the two cameras. However, due to the high dimensionality of the parameter space and noise in the
  16482. input data, the function can diverge from the correct solution. If the intrinsic parameters can be
estimated with high accuracy for each of the cameras individually (for example, using
#calibrateCamera ), it is recommended to do so and then pass the REF: CALIB_FIX_INTRINSIC flag to the
  16485. function along with the computed intrinsic parameters. Otherwise, if all the parameters are
  16486. estimated at once, it makes sense to restrict some parameters, for example, pass
  16487. REF: CALIB_SAME_FOCAL_LENGTH and REF: CALIB_ZERO_TANGENT_DIST flags, which is usually a
  16488. reasonable assumption.
  16489. Similarly to #calibrateCamera, the function minimizes the total re-projection error for all the
  16490. points in all the available views from both cameras. The function returns the final value of the
  16491. re-projection error.</dd>
  16492. <dt><span class="returnLabel">Returns:</span></dt>
  16493. <dd>automatically generated</dd>
  16494. </dl>
  16495. </li>
  16496. </ul>
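<div class="block">A short sketch of supplying explicit termination criteria to this overload, again reusing
the variables from the sketches above; the iteration count and epsilon are arbitrary illustrative values.</div>
<pre>// Variables carried over from the sketches above; TermCriteria is org.opencv.core.TermCriteria.
// Stop after at most 200 iterations or once the parameter change drops below 1e-6.
TermCriteria criteria = new TermCriteria(TermCriteria.MAX_ITER + TermCriteria.EPS, 200, 1e-6);

double rms = Calib3d.stereoCalibrateExtended(objectPoints, imagePoints1, imagePoints2,
        cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2, imageSize,
        R, T, E, F, rvecs, tvecs, perViewErrors,
        Calib3d.CALIB_FIX_INTRINSIC, criteria);</pre>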
  16497. <a name="stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  16498. <!-- -->
  16499. </a>
  16500. <ul class="blockList">
  16501. <li class="blockList">
  16502. <h4>stereoRectify</h4>
  16503. <pre>public static&nbsp;void&nbsp;stereoRectify(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  16504. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  16505. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  16506. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  16507. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  16508. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  16509. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  16510. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  16511. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  16512. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  16513. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  16514. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q)</pre>
  16515. <div class="block">Computes rectification transforms for each head of a calibrated stereo camera.</div>
  16516. <dl>
  16517. <dt><span class="paramLabel">Parameters:</span></dt>
  16518. <dd><code>cameraMatrix1</code> - First camera intrinsic matrix.</dd>
  16519. <dd><code>distCoeffs1</code> - First camera distortion parameters.</dd>
  16520. <dd><code>cameraMatrix2</code> - Second camera intrinsic matrix.</dd>
  16521. <dd><code>distCoeffs2</code> - Second camera distortion parameters.</dd>
  16522. <dd><code>imageSize</code> - Size of the image used for stereo calibration.</dd>
  16523. <dd><code>R</code> - Rotation matrix from the coordinate system of the first camera to the second camera,
  16524. see REF: stereoCalibrate.</dd>
  16525. <dd><code>T</code> - Translation vector from the coordinate system of the first camera to the second camera,
  16526. see REF: stereoCalibrate.</dd>
  16527. <dd><code>R1</code> - Output 3x3 rectification transform (rotation matrix) for the first camera. This matrix
  16528. brings points given in the unrectified first camera's coordinate system to points in the rectified
  16529. first camera's coordinate system. In more technical terms, it performs a change of basis from the
  16530. unrectified first camera's coordinate system to the rectified first camera's coordinate system.</dd>
  16531. <dd><code>R2</code> - Output 3x3 rectification transform (rotation matrix) for the second camera. This matrix
  16532. brings points given in the unrectified second camera's coordinate system to points in the rectified
  16533. second camera's coordinate system. In more technical terms, it performs a change of basis from the
  16534. unrectified second camera's coordinate system to the rectified second camera's coordinate system.</dd>
  16535. <dd><code>P1</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the first
  16536. camera, i.e. it projects points given in the rectified first camera coordinate system into the
  16537. rectified first camera's image.</dd>
  16538. <dd><code>P2</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the second
  16539. camera, i.e. it projects points given in the rectified first camera coordinate system into the
  16540. rectified second camera's image.</dd>
  16541. <dd><code>Q</code> - Output \(4 \times 4\) disparity-to-depth mapping matrix (see REF: reprojectImageTo3D).
The parameters available only in the fuller overloads of this method behave as follows. If
REF: CALIB_ZERO_DISPARITY is set in flags, the function makes the principal points of each camera
have the same pixel coordinates in the rectified views; if it is not set, the function may still
shift the images in the horizontal or vertical direction (depending on the orientation of epipolar
lines) to maximize the useful image area. If the free scaling parameter alpha is -1 or absent, the
function performs the default scaling; otherwise it should be between 0 and 1. alpha=0 means that
the rectified images are zoomed and shifted so that only valid pixels are visible (no black areas
after rectification). alpha=1 means that the rectified images are decimated and shifted so that all
the pixels from the original images from the cameras are retained in the rectified images (no source
image pixels are lost). Any intermediate value yields an intermediate result between those two
extreme cases. newImageSize is the new image resolution after rectification, passed on to
#initUndistortRectifyMap (see the stereo_calib.cpp sample in the OpenCV samples directory); when
(0,0) is passed (default), it is set to the original imageSize. Setting it to a larger value can
help you preserve details in the original image, especially when there is a big radial distortion.
The optional output rectangles validPixROI1 and validPixROI2 mark the regions inside the rectified
images where all the pixels are valid. If alpha=0, the ROIs cover the whole images. Otherwise, they
are likely to be smaller (see the picture below).
  16559. The function computes the rotation matrices for each camera that (virtually) make both camera image
  16560. planes the same plane. Consequently, this makes all the epipolar lines parallel and thus simplifies
  16561. the dense stereo correspondence problem. The function takes the matrices computed by #stereoCalibrate
  16562. as input. As output, it provides two rotation matrices and also two projection matrices in the new
  16563. coordinates. The function distinguishes the following two cases:
  16564. <ul>
  16565. <li>
  16566. <b>Horizontal stereo</b>: the first and the second camera views are shifted relative to each other
  16567. mainly along the x-axis (with possible small vertical shift). In the rectified images, the
  16568. corresponding epipolar lines in the left and right cameras are horizontal and have the same
  16569. y-coordinate. P1 and P2 look like:
  16570. </li>
  16571. </ul>
  16572. \(\texttt{P1} = \begin{bmatrix}
  16573. f &amp; 0 &amp; cx_1 &amp; 0 \\
  16574. 0 &amp; f &amp; cy &amp; 0 \\
  16575. 0 &amp; 0 &amp; 1 &amp; 0
  16576. \end{bmatrix}\)
  16577. \(\texttt{P2} = \begin{bmatrix}
  16578. f &amp; 0 &amp; cx_2 &amp; T_x \cdot f \\
  16579. 0 &amp; f &amp; cy &amp; 0 \\
  16580. 0 &amp; 0 &amp; 1 &amp; 0
  16581. \end{bmatrix} ,\)
  16582. \(\texttt{Q} = \begin{bmatrix}
  16583. 1 &amp; 0 &amp; 0 &amp; -cx_1 \\
  16584. 0 &amp; 1 &amp; 0 &amp; -cy \\
  16585. 0 &amp; 0 &amp; 0 &amp; f \\
  16586. 0 &amp; 0 &amp; -\frac{1}{T_x} &amp; \frac{cx_1 - cx_2}{T_x}
  16587. \end{bmatrix} \)
  16588. where \(T_x\) is a horizontal shift between the cameras and \(cx_1=cx_2\) if
  16589. REF: CALIB_ZERO_DISPARITY is set.
  16590. <ul>
  16591. <li>
  16592. <b>Vertical stereo</b>: the first and the second camera views are shifted relative to each other
  16593. mainly in the vertical direction (and probably a bit in the horizontal direction too). The epipolar
  16594. lines in the rectified images are vertical and have the same x-coordinate. P1 and P2 look like:
  16595. </li>
  16596. </ul>
  16597. \(\texttt{P1} = \begin{bmatrix}
  16598. f &amp; 0 &amp; cx &amp; 0 \\
  16599. 0 &amp; f &amp; cy_1 &amp; 0 \\
  16600. 0 &amp; 0 &amp; 1 &amp; 0
  16601. \end{bmatrix}\)
  16602. \(\texttt{P2} = \begin{bmatrix}
  16603. f &amp; 0 &amp; cx &amp; 0 \\
  16604. 0 &amp; f &amp; cy_2 &amp; T_y \cdot f \\
  16605. 0 &amp; 0 &amp; 1 &amp; 0
  16606. \end{bmatrix},\)
  16607. \(\texttt{Q} = \begin{bmatrix}
  16608. 1 &amp; 0 &amp; 0 &amp; -cx \\
  16609. 0 &amp; 1 &amp; 0 &amp; -cy_1 \\
  16610. 0 &amp; 0 &amp; 0 &amp; f \\
  16611. 0 &amp; 0 &amp; -\frac{1}{T_y} &amp; \frac{cy_1 - cy_2}{T_y}
  16612. \end{bmatrix} \)
  16613. where \(T_y\) is a vertical shift between the cameras and \(cy_1=cy_2\) if
  16614. REF: CALIB_ZERO_DISPARITY is set.
  16615. As you can see, the first three columns of P1 and P2 will effectively be the new "rectified" camera
  16616. matrices. The matrices, together with R1 and R2 , can then be passed to #initUndistortRectifyMap to
  16617. initialize the rectification map for each camera.
  16618. See below the screenshot from the stereo_calib.cpp sample. Some red horizontal lines pass through
  16619. the corresponding image regions. This means that the images are well rectified, which is what most
  16620. stereo correspondence algorithms rely on. The green rectangles are roi1 and roi2 . You see that
  16621. their interiors are all valid pixels.
  16622. ![image](pics/stereo_undistort.jpg)</dd>
  16623. </dl>
  16624. </li>
  16625. </ul>
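<div class="block">A hedged Java sketch of the workflow described above: rectify with this overload, then
build per-camera rectification maps with initUndistortRectifyMap (referenced above and assumed to be
exposed on this class) and remap the images using the imgproc module. All inputs, including R and T from
stereoCalibrate and the original leftImage/rightImage, are assumed to be provided by the caller; names
are illustrative.</div>
<pre>import org.opencv.calib3d.Calib3d;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

public class StereoRectifySketch {
    /** Illustrative only: calibration results and input images are assumed to be available. */
    public static void rectifyPair(Mat cameraMatrix1, Mat distCoeffs1,
                                   Mat cameraMatrix2, Mat distCoeffs2,
                                   Size imageSize, Mat R, Mat T,
                                   Mat leftImage, Mat rightImage) {
        Mat R1 = new Mat(), R2 = new Mat(), P1 = new Mat(), P2 = new Mat(), Q = new Mat();
        Calib3d.stereoRectify(cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2,
                imageSize, R, T, R1, R2, P1, P2, Q);

        // Build the rectification map for each camera, as suggested above.
        Mat map1x = new Mat(), map1y = new Mat(), map2x = new Mat(), map2y = new Mat();
        Calib3d.initUndistortRectifyMap(cameraMatrix1, distCoeffs1, R1, P1, imageSize,
                CvType.CV_32FC1, map1x, map1y);
        Calib3d.initUndistortRectifyMap(cameraMatrix2, distCoeffs2, R2, P2, imageSize,
                CvType.CV_32FC1, map2x, map2y);

        // Remap the original images into the rectified views.
        Mat leftRect = new Mat(), rightRect = new Mat();
        Imgproc.remap(leftImage, leftRect, map1x, map1y, Imgproc.INTER_LINEAR);
        Imgproc.remap(rightImage, rightRect, map2x, map2y, Imgproc.INTER_LINEAR);
        // leftRect / rightRect now have aligned epipolar lines and can be fed to a
        // stereo correspondence algorithm.
    }
}</pre>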
  16626. <a name="stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-">
  16627. <!-- -->
  16628. </a>
  16629. <ul class="blockList">
  16630. <li class="blockList">
  16631. <h4>stereoRectify</h4>
  16632. <pre>public static&nbsp;void&nbsp;stereoRectify(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  16633. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  16634. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  16635. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  16636. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  16637. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  16638. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  16639. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  16640. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  16641. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  16642. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  16643. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  16644. int&nbsp;flags)</pre>
  16645. <div class="block">Computes rectification transforms for each head of a calibrated stereo camera.</div>
  16646. <dl>
  16647. <dt><span class="paramLabel">Parameters:</span></dt>
  16648. <dd><code>cameraMatrix1</code> - First camera intrinsic matrix.</dd>
  16649. <dd><code>distCoeffs1</code> - First camera distortion parameters.</dd>
  16650. <dd><code>cameraMatrix2</code> - Second camera intrinsic matrix.</dd>
  16651. <dd><code>distCoeffs2</code> - Second camera distortion parameters.</dd>
  16652. <dd><code>imageSize</code> - Size of the image used for stereo calibration.</dd>
  16653. <dd><code>R</code> - Rotation matrix from the coordinate system of the first camera to the second camera,
  16654. see REF: stereoCalibrate.</dd>
  16655. <dd><code>T</code> - Translation vector from the coordinate system of the first camera to the second camera,
  16656. see REF: stereoCalibrate.</dd>
  16657. <dd><code>R1</code> - Output 3x3 rectification transform (rotation matrix) for the first camera. This matrix
  16658. brings points given in the unrectified first camera's coordinate system to points in the rectified
  16659. first camera's coordinate system. In more technical terms, it performs a change of basis from the
  16660. unrectified first camera's coordinate system to the rectified first camera's coordinate system.</dd>
  16661. <dd><code>R2</code> - Output 3x3 rectification transform (rotation matrix) for the second camera. This matrix
  16662. brings points given in the unrectified second camera's coordinate system to points in the rectified
  16663. second camera's coordinate system. In more technical terms, it performs a change of basis from the
  16664. unrectified second camera's coordinate system to the rectified second camera's coordinate system.</dd>
  16665. <dd><code>P1</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the first
  16666. camera, i.e. it projects points given in the rectified first camera coordinate system into the
  16667. rectified first camera's image.</dd>
  16668. <dd><code>P2</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the second
  16669. camera, i.e. it projects points given in the rectified first camera coordinate system into the
  16670. rectified second camera's image.</dd>
  16671. <dd><code>Q</code> - Output \(4 \times 4\) disparity-to-depth mapping matrix (see REF: reprojectImageTo3D).</dd>
  16672. <dd><code>flags</code> - Operation flags that may be zero or REF: CALIB_ZERO_DISPARITY . If the flag is set,
  16673. the function makes the principal points of each camera have the same pixel coordinates in the
  16674. rectified views. And if the flag is not set, the function may still shift the images in the
  16675. horizontal or vertical direction (depending on the orientation of epipolar lines) to maximize the
  16676. useful image area.
The parameters available only in the fuller overloads of this method behave as follows. If the free
scaling parameter alpha is -1 or absent, the function performs the default scaling; otherwise it
should be between 0 and 1. alpha=0 means that the rectified images are zoomed and shifted so that
only valid pixels are visible (no black areas after rectification). alpha=1 means that the rectified
images are decimated and shifted so that all the pixels from the original images from the cameras
are retained in the rectified images (no source image pixels are lost). Any intermediate value
yields an intermediate result between those two extreme cases. newImageSize is the new image
resolution after rectification, passed on to #initUndistortRectifyMap (see the stereo_calib.cpp
sample in the OpenCV samples directory); when (0,0) is passed (default), it is set to the original
imageSize. Setting it to a larger value can help you preserve details in the original image,
especially when there is a big radial distortion. The optional output rectangles validPixROI1 and
validPixROI2 mark the regions inside the rectified images where all the pixels are valid. If
alpha=0, the ROIs cover the whole images. Otherwise, they are likely to be smaller (see the picture
below).
  16690. The function computes the rotation matrices for each camera that (virtually) make both camera image
  16691. planes the same plane. Consequently, this makes all the epipolar lines parallel and thus simplifies
  16692. the dense stereo correspondence problem. The function takes the matrices computed by #stereoCalibrate
  16693. as input. As output, it provides two rotation matrices and also two projection matrices in the new
  16694. coordinates. The function distinguishes the following two cases:
  16695. <ul>
  16696. <li>
  16697. <b>Horizontal stereo</b>: the first and the second camera views are shifted relative to each other
  16698. mainly along the x-axis (with possible small vertical shift). In the rectified images, the
  16699. corresponding epipolar lines in the left and right cameras are horizontal and have the same
  16700. y-coordinate. P1 and P2 look like:
  16701. </li>
  16702. </ul>
  16703. \(\texttt{P1} = \begin{bmatrix}
  16704. f &amp; 0 &amp; cx_1 &amp; 0 \\
  16705. 0 &amp; f &amp; cy &amp; 0 \\
  16706. 0 &amp; 0 &amp; 1 &amp; 0
  16707. \end{bmatrix}\)
  16708. \(\texttt{P2} = \begin{bmatrix}
  16709. f &amp; 0 &amp; cx_2 &amp; T_x \cdot f \\
  16710. 0 &amp; f &amp; cy &amp; 0 \\
  16711. 0 &amp; 0 &amp; 1 &amp; 0
  16712. \end{bmatrix} ,\)
  16713. \(\texttt{Q} = \begin{bmatrix}
  16714. 1 &amp; 0 &amp; 0 &amp; -cx_1 \\
  16715. 0 &amp; 1 &amp; 0 &amp; -cy \\
  16716. 0 &amp; 0 &amp; 0 &amp; f \\
  16717. 0 &amp; 0 &amp; -\frac{1}{T_x} &amp; \frac{cx_1 - cx_2}{T_x}
  16718. \end{bmatrix} \)
  16719. where \(T_x\) is a horizontal shift between the cameras and \(cx_1=cx_2\) if
  16720. REF: CALIB_ZERO_DISPARITY is set.
  16721. <ul>
  16722. <li>
  16723. <b>Vertical stereo</b>: the first and the second camera views are shifted relative to each other
  16724. mainly in the vertical direction (and probably a bit in the horizontal direction too). The epipolar
  16725. lines in the rectified images are vertical and have the same x-coordinate. P1 and P2 look like:
  16726. </li>
  16727. </ul>
  16728. \(\texttt{P1} = \begin{bmatrix}
  16729. f &amp; 0 &amp; cx &amp; 0 \\
  16730. 0 &amp; f &amp; cy_1 &amp; 0 \\
  16731. 0 &amp; 0 &amp; 1 &amp; 0
  16732. \end{bmatrix}\)
  16733. \(\texttt{P2} = \begin{bmatrix}
  16734. f &amp; 0 &amp; cx &amp; 0 \\
  16735. 0 &amp; f &amp; cy_2 &amp; T_y \cdot f \\
  16736. 0 &amp; 0 &amp; 1 &amp; 0
  16737. \end{bmatrix},\)
  16738. \(\texttt{Q} = \begin{bmatrix}
  16739. 1 &amp; 0 &amp; 0 &amp; -cx \\
  16740. 0 &amp; 1 &amp; 0 &amp; -cy_1 \\
  16741. 0 &amp; 0 &amp; 0 &amp; f \\
  16742. 0 &amp; 0 &amp; -\frac{1}{T_y} &amp; \frac{cy_1 - cy_2}{T_y}
  16743. \end{bmatrix} \)
  16744. where \(T_y\) is a vertical shift between the cameras and \(cy_1=cy_2\) if
  16745. REF: CALIB_ZERO_DISPARITY is set.
  16746. As you can see, the first three columns of P1 and P2 will effectively be the new "rectified" camera
  16747. matrices. The matrices, together with R1 and R2 , can then be passed to #initUndistortRectifyMap to
  16748. initialize the rectification map for each camera.
  16749. See below the screenshot from the stereo_calib.cpp sample. Some red horizontal lines pass through
  16750. the corresponding image regions. This means that the images are well rectified, which is what most
  16751. stereo correspondence algorithms rely on. The green rectangles are roi1 and roi2 . You see that
  16752. their interiors are all valid pixels.
  16753. ![image](pics/stereo_undistort.jpg)</dd>
  16754. </dl>
  16755. </li>
  16756. </ul>
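The \(4 \times 4\) matrix Q returned above is normally consumed by REF: reprojectImageTo3D. The sketch below is an editorial illustration rather than part of the generated reference: it assumes rectLeft and rectRight are the already rectified 8-bit grayscale images, Q is the matrix produced by stereoRectify, and it uses a block-matching disparity (StereoBM with illustrative parameters) purely as a placeholder for any stereo matcher.
<pre>// Assumed imports: org.opencv.core.*, org.opencv.calib3d.Calib3d, org.opencv.calib3d.StereoBM
StereoBM matcher = StereoBM.create(96, 15);          // 96 disparity levels, 15x15 block (illustrative)
Mat disparity16 = new Mat();
matcher.compute(rectLeft, rectRight, disparity16);   // fixed-point disparities, scaled by 16
Mat disparity = new Mat();
disparity16.convertTo(disparity, CvType.CV_32F, 1.0 / 16.0);
Mat xyz = new Mat();                                 // CV_32FC3 image of per-pixel 3D points
Calib3d.reprojectImageTo3D(disparity, xyz, Q);</pre>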
  16757. <a name="stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-">
  16758. <!-- -->
  16759. </a>
  16760. <ul class="blockList">
  16761. <li class="blockList">
  16762. <h4>stereoRectify</h4>
  16763. <pre>public static&nbsp;void&nbsp;stereoRectify(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  16764. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  16765. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  16766. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  16767. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  16768. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  16769. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  16770. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  16771. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  16772. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  16773. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  16774. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  16775. int&nbsp;flags,
  16776. double&nbsp;alpha)</pre>
  16777. <div class="block">Computes rectification transforms for each head of a calibrated stereo camera.</div>
  16778. <dl>
  16779. <dt><span class="paramLabel">Parameters:</span></dt>
  16780. <dd><code>cameraMatrix1</code> - First camera intrinsic matrix.</dd>
  16781. <dd><code>distCoeffs1</code> - First camera distortion parameters.</dd>
  16782. <dd><code>cameraMatrix2</code> - Second camera intrinsic matrix.</dd>
  16783. <dd><code>distCoeffs2</code> - Second camera distortion parameters.</dd>
  16784. <dd><code>imageSize</code> - Size of the image used for stereo calibration.</dd>
  16785. <dd><code>R</code> - Rotation matrix from the coordinate system of the first camera to the second camera,
  16786. see REF: stereoCalibrate.</dd>
  16787. <dd><code>T</code> - Translation vector from the coordinate system of the first camera to the second camera,
  16788. see REF: stereoCalibrate.</dd>
  16789. <dd><code>R1</code> - Output 3x3 rectification transform (rotation matrix) for the first camera. This matrix
  16790. brings points given in the unrectified first camera's coordinate system to points in the rectified
  16791. first camera's coordinate system. In more technical terms, it performs a change of basis from the
  16792. unrectified first camera's coordinate system to the rectified first camera's coordinate system.</dd>
  16793. <dd><code>R2</code> - Output 3x3 rectification transform (rotation matrix) for the second camera. This matrix
  16794. brings points given in the unrectified second camera's coordinate system to points in the rectified
  16795. second camera's coordinate system. In more technical terms, it performs a change of basis from the
  16796. unrectified second camera's coordinate system to the rectified second camera's coordinate system.</dd>
  16797. <dd><code>P1</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the first
  16798. camera, i.e. it projects points given in the rectified first camera coordinate system into the
  16799. rectified first camera's image.</dd>
  16800. <dd><code>P2</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the second
  16801. camera, i.e. it projects points given in the rectified first camera coordinate system into the
  16802. rectified second camera's image.</dd>
  16803. <dd><code>Q</code> - Output \(4 \times 4\) disparity-to-depth mapping matrix (see REF: reprojectImageTo3D).</dd>
  16804. <dd><code>flags</code> - Operation flags that may be zero or REF: CALIB_ZERO_DISPARITY . If the flag is set,
  16805. the function makes the principal points of each camera have the same pixel coordinates in the
  16806. rectified views. And if the flag is not set, the function may still shift the images in the
  16807. horizontal or vertical direction (depending on the orientation of epipolar lines) to maximize the
  16808. useful image area.</dd>
  16809. <dd><code>alpha</code> - Free scaling parameter. If it is -1 or absent, the function performs the default
  16810. scaling. Otherwise, the parameter should be between 0 and 1. alpha=0 means that the rectified
  16811. images are zoomed and shifted so that only valid pixels are visible (no black areas after
  16812. rectification). alpha=1 means that the rectified image is decimated and shifted so that all the
  16813. pixels from the original images from the cameras are retained in the rectified images (no source
  16814. image pixels are lost). Any intermediate value yields an intermediate result between
  16815. those two extreme cases.
  16823. The function computes the rotation matrices for each camera that (virtually) make both camera image
  16824. planes the same plane. Consequently, this makes all the epipolar lines parallel and thus simplifies
  16825. the dense stereo correspondence problem. The function takes the matrices computed by #stereoCalibrate
  16826. as input. As output, it provides two rotation matrices and also two projection matrices in the new
  16827. coordinates. The function distinguishes the following two cases:
  16828. <ul>
  16829. <li>
  16830. <b>Horizontal stereo</b>: the first and the second camera views are shifted relative to each other
  16831. mainly along the x-axis (with possible small vertical shift). In the rectified images, the
  16832. corresponding epipolar lines in the left and right cameras are horizontal and have the same
  16833. y-coordinate. P1 and P2 look like:
  16834. </li>
  16835. </ul>
  16836. \(\texttt{P1} = \begin{bmatrix}
  16837. f &amp; 0 &amp; cx_1 &amp; 0 \\
  16838. 0 &amp; f &amp; cy &amp; 0 \\
  16839. 0 &amp; 0 &amp; 1 &amp; 0
  16840. \end{bmatrix}\)
  16841. \(\texttt{P2} = \begin{bmatrix}
  16842. f &amp; 0 &amp; cx_2 &amp; T_x \cdot f \\
  16843. 0 &amp; f &amp; cy &amp; 0 \\
  16844. 0 &amp; 0 &amp; 1 &amp; 0
  16845. \end{bmatrix} ,\)
  16846. \(\texttt{Q} = \begin{bmatrix}
  16847. 1 &amp; 0 &amp; 0 &amp; -cx_1 \\
  16848. 0 &amp; 1 &amp; 0 &amp; -cy \\
  16849. 0 &amp; 0 &amp; 0 &amp; f \\
  16850. 0 &amp; 0 &amp; -\frac{1}{T_x} &amp; \frac{cx_1 - cx_2}{T_x}
  16851. \end{bmatrix} \)
  16852. where \(T_x\) is a horizontal shift between the cameras and \(cx_1=cx_2\) if
  16853. REF: CALIB_ZERO_DISPARITY is set.
  16854. <ul>
  16855. <li>
  16856. <b>Vertical stereo</b>: the first and the second camera views are shifted relative to each other
  16857. mainly in the vertical direction (and probably a bit in the horizontal direction too). The epipolar
  16858. lines in the rectified images are vertical and have the same x-coordinate. P1 and P2 look like:
  16859. </li>
  16860. </ul>
  16861. \(\texttt{P1} = \begin{bmatrix}
  16862. f &amp; 0 &amp; cx &amp; 0 \\
  16863. 0 &amp; f &amp; cy_1 &amp; 0 \\
  16864. 0 &amp; 0 &amp; 1 &amp; 0
  16865. \end{bmatrix}\)
  16866. \(\texttt{P2} = \begin{bmatrix}
  16867. f &amp; 0 &amp; cx &amp; 0 \\
  16868. 0 &amp; f &amp; cy_2 &amp; T_y \cdot f \\
  16869. 0 &amp; 0 &amp; 1 &amp; 0
  16870. \end{bmatrix},\)
  16871. \(\texttt{Q} = \begin{bmatrix}
  16872. 1 &amp; 0 &amp; 0 &amp; -cx \\
  16873. 0 &amp; 1 &amp; 0 &amp; -cy_1 \\
  16874. 0 &amp; 0 &amp; 0 &amp; f \\
  16875. 0 &amp; 0 &amp; -\frac{1}{T_y} &amp; \frac{cy_1 - cy_2}{T_y}
  16876. \end{bmatrix} \)
  16877. where \(T_y\) is a vertical shift between the cameras and \(cy_1=cy_2\) if
  16878. REF: CALIB_ZERO_DISPARITY is set.
  16879. As you can see, the first three columns of P1 and P2 will effectively be the new "rectified" camera
  16880. matrices. The matrices, together with R1 and R2 , can then be passed to #initUndistortRectifyMap to
  16881. initialize the rectification map for each camera.
  16882. See below the screenshot from the stereo_calib.cpp sample. Some red horizontal lines pass through
  16883. the corresponding image regions. This means that the images are well rectified, which is what most
  16884. stereo correspondence algorithms rely on. The green rectangles are roi1 and roi2 . You see that
  16885. their interiors are all valid pixels.
  16886. ![image](pics/stereo_undistort.jpg)</dd>
  16887. </dl>
  16888. </li>
  16889. </ul>
  16890. <a name="stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-org.opencv.core.Size-">
  16891. <!-- -->
  16892. </a>
  16893. <ul class="blockList">
  16894. <li class="blockList">
  16895. <h4>stereoRectify</h4>
  16896. <pre>public static&nbsp;void&nbsp;stereoRectify(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  16897. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  16898. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  16899. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  16900. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  16901. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  16902. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  16903. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  16904. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  16905. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  16906. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  16907. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  16908. int&nbsp;flags,
  16909. double&nbsp;alpha,
  16910. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImageSize)</pre>
  16911. <div class="block">Computes rectification transforms for each head of a calibrated stereo camera.</div>
  16912. <dl>
  16913. <dt><span class="paramLabel">Parameters:</span></dt>
  16914. <dd><code>cameraMatrix1</code> - First camera intrinsic matrix.</dd>
  16915. <dd><code>distCoeffs1</code> - First camera distortion parameters.</dd>
  16916. <dd><code>cameraMatrix2</code> - Second camera intrinsic matrix.</dd>
  16917. <dd><code>distCoeffs2</code> - Second camera distortion parameters.</dd>
  16918. <dd><code>imageSize</code> - Size of the image used for stereo calibration.</dd>
  16919. <dd><code>R</code> - Rotation matrix from the coordinate system of the first camera to the second camera,
  16920. see REF: stereoCalibrate.</dd>
  16921. <dd><code>T</code> - Translation vector from the coordinate system of the first camera to the second camera,
  16922. see REF: stereoCalibrate.</dd>
  16923. <dd><code>R1</code> - Output 3x3 rectification transform (rotation matrix) for the first camera. This matrix
  16924. brings points given in the unrectified first camera's coordinate system to points in the rectified
  16925. first camera's coordinate system. In more technical terms, it performs a change of basis from the
  16926. unrectified first camera's coordinate system to the rectified first camera's coordinate system.</dd>
  16927. <dd><code>R2</code> - Output 3x3 rectification transform (rotation matrix) for the second camera. This matrix
  16928. brings points given in the unrectified second camera's coordinate system to points in the rectified
  16929. second camera's coordinate system. In more technical terms, it performs a change of basis from the
  16930. unrectified second camera's coordinate system to the rectified second camera's coordinate system.</dd>
  16931. <dd><code>P1</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the first
  16932. camera, i.e. it projects points given in the rectified first camera coordinate system into the
  16933. rectified first camera's image.</dd>
  16934. <dd><code>P2</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the second
  16935. camera, i.e. it projects points given in the rectified first camera coordinate system into the
  16936. rectified second camera's image.</dd>
  16937. <dd><code>Q</code> - Output \(4 \times 4\) disparity-to-depth mapping matrix (see REF: reprojectImageTo3D).</dd>
  16938. <dd><code>flags</code> - Operation flags that may be zero or REF: CALIB_ZERO_DISPARITY . If the flag is set,
  16939. the function makes the principal points of each camera have the same pixel coordinates in the
  16940. rectified views. And if the flag is not set, the function may still shift the images in the
  16941. horizontal or vertical direction (depending on the orientation of epipolar lines) to maximize the
  16942. useful image area.</dd>
  16943. <dd><code>alpha</code> - Free scaling parameter. If it is -1 or absent, the function performs the default
  16944. scaling. Otherwise, the parameter should be between 0 and 1. alpha=0 means that the rectified
  16945. images are zoomed and shifted so that only valid pixels are visible (no black areas after
  16946. rectification). alpha=1 means that the rectified image is decimated and shifted so that all the
  16947. pixels from the original images from the cameras are retained in the rectified images (no source
  16948. image pixels are lost). Any intermediate value yields an intermediate result between
  16949. those two extreme cases.</dd>
  16950. <dd><code>newImageSize</code> - New image resolution after rectification. The same size should be passed to
  16951. #initUndistortRectifyMap (see the stereo_calib.cpp sample in OpenCV samples directory). When (0,0)
  16952. is passed (default), it is set to the original imageSize . Setting it to a larger value can help you
  16953. preserve details in the original image, especially when there is a big radial distortion.
  16958. The function computes the rotation matrices for each camera that (virtually) make both camera image
  16959. planes the same plane. Consequently, this makes all the epipolar lines parallel and thus simplifies
  16960. the dense stereo correspondence problem. The function takes the matrices computed by #stereoCalibrate
  16961. as input. As output, it provides two rotation matrices and also two projection matrices in the new
  16962. coordinates. The function distinguishes the following two cases:
  16963. <ul>
  16964. <li>
  16965. <b>Horizontal stereo</b>: the first and the second camera views are shifted relative to each other
  16966. mainly along the x-axis (with possible small vertical shift). In the rectified images, the
  16967. corresponding epipolar lines in the left and right cameras are horizontal and have the same
  16968. y-coordinate. P1 and P2 look like:
  16969. </li>
  16970. </ul>
  16971. \(\texttt{P1} = \begin{bmatrix}
  16972. f &amp; 0 &amp; cx_1 &amp; 0 \\
  16973. 0 &amp; f &amp; cy &amp; 0 \\
  16974. 0 &amp; 0 &amp; 1 &amp; 0
  16975. \end{bmatrix}\)
  16976. \(\texttt{P2} = \begin{bmatrix}
  16977. f &amp; 0 &amp; cx_2 &amp; T_x \cdot f \\
  16978. 0 &amp; f &amp; cy &amp; 0 \\
  16979. 0 &amp; 0 &amp; 1 &amp; 0
  16980. \end{bmatrix} ,\)
  16981. \(\texttt{Q} = \begin{bmatrix}
  16982. 1 &amp; 0 &amp; 0 &amp; -cx_1 \\
  16983. 0 &amp; 1 &amp; 0 &amp; -cy \\
  16984. 0 &amp; 0 &amp; 0 &amp; f \\
  16985. 0 &amp; 0 &amp; -\frac{1}{T_x} &amp; \frac{cx_1 - cx_2}{T_x}
  16986. \end{bmatrix} \)
  16987. where \(T_x\) is a horizontal shift between the cameras and \(cx_1=cx_2\) if
  16988. REF: CALIB_ZERO_DISPARITY is set.
  16989. <ul>
  16990. <li>
  16991. <b>Vertical stereo</b>: the first and the second camera views are shifted relative to each other
  16992. mainly in the vertical direction (and probably a bit in the horizontal direction too). The epipolar
  16993. lines in the rectified images are vertical and have the same x-coordinate. P1 and P2 look like:
  16994. </li>
  16995. </ul>
  16996. \(\texttt{P1} = \begin{bmatrix}
  16997. f &amp; 0 &amp; cx &amp; 0 \\
  16998. 0 &amp; f &amp; cy_1 &amp; 0 \\
  16999. 0 &amp; 0 &amp; 1 &amp; 0
  17000. \end{bmatrix}\)
  17001. \(\texttt{P2} = \begin{bmatrix}
  17002. f &amp; 0 &amp; cx &amp; 0 \\
  17003. 0 &amp; f &amp; cy_2 &amp; T_y \cdot f \\
  17004. 0 &amp; 0 &amp; 1 &amp; 0
  17005. \end{bmatrix},\)
  17006. \(\texttt{Q} = \begin{bmatrix}
  17007. 1 &amp; 0 &amp; 0 &amp; -cx \\
  17008. 0 &amp; 1 &amp; 0 &amp; -cy_1 \\
  17009. 0 &amp; 0 &amp; 0 &amp; f \\
  17010. 0 &amp; 0 &amp; -\frac{1}{T_y} &amp; \frac{cy_1 - cy_2}{T_y}
  17011. \end{bmatrix} \)
  17012. where \(T_y\) is a vertical shift between the cameras and \(cy_1=cy_2\) if
  17013. REF: CALIB_ZERO_DISPARITY is set.
  17014. As you can see, the first three columns of P1 and P2 will effectively be the new "rectified" camera
  17015. matrices. The matrices, together with R1 and R2 , can then be passed to #initUndistortRectifyMap to
  17016. initialize the rectification map for each camera.
  17017. See below the screenshot from the stereo_calib.cpp sample. Some red horizontal lines pass through
  17018. the corresponding image regions. This means that the images are well rectified, which is what most
  17019. stereo correspondence algorithms rely on. The green rectangles are roi1 and roi2 . You see that
  17020. their interiors are all valid pixels.
  17021. ![image](pics/stereo_undistort.jpg)</dd>
  17022. </dl>
  17023. </li>
  17024. </ul>
  17025. <a name="stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-org.opencv.core.Size-org.opencv.core.Rect-">
  17026. <!-- -->
  17027. </a>
  17028. <ul class="blockList">
  17029. <li class="blockList">
  17030. <h4>stereoRectify</h4>
  17031. <pre>public static&nbsp;void&nbsp;stereoRectify(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  17032. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  17033. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  17034. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  17035. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  17036. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  17037. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  17038. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  17039. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  17040. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  17041. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  17042. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  17043. int&nbsp;flags,
  17044. double&nbsp;alpha,
  17045. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImageSize,
  17046. <a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;validPixROI1)</pre>
  17047. <div class="block">Computes rectification transforms for each head of a calibrated stereo camera.</div>
  17048. <dl>
  17049. <dt><span class="paramLabel">Parameters:</span></dt>
  17050. <dd><code>cameraMatrix1</code> - First camera intrinsic matrix.</dd>
  17051. <dd><code>distCoeffs1</code> - First camera distortion parameters.</dd>
  17052. <dd><code>cameraMatrix2</code> - Second camera intrinsic matrix.</dd>
  17053. <dd><code>distCoeffs2</code> - Second camera distortion parameters.</dd>
  17054. <dd><code>imageSize</code> - Size of the image used for stereo calibration.</dd>
  17055. <dd><code>R</code> - Rotation matrix from the coordinate system of the first camera to the second camera,
  17056. see REF: stereoCalibrate.</dd>
  17057. <dd><code>T</code> - Translation vector from the coordinate system of the first camera to the second camera,
  17058. see REF: stereoCalibrate.</dd>
  17059. <dd><code>R1</code> - Output 3x3 rectification transform (rotation matrix) for the first camera. This matrix
  17060. brings points given in the unrectified first camera's coordinate system to points in the rectified
  17061. first camera's coordinate system. In more technical terms, it performs a change of basis from the
  17062. unrectified first camera's coordinate system to the rectified first camera's coordinate system.</dd>
  17063. <dd><code>R2</code> - Output 3x3 rectification transform (rotation matrix) for the second camera. This matrix
  17064. brings points given in the unrectified second camera's coordinate system to points in the rectified
  17065. second camera's coordinate system. In more technical terms, it performs a change of basis from the
  17066. unrectified second camera's coordinate system to the rectified second camera's coordinate system.</dd>
  17067. <dd><code>P1</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the first
  17068. camera, i.e. it projects points given in the rectified first camera coordinate system into the
  17069. rectified first camera's image.</dd>
  17070. <dd><code>P2</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the second
  17071. camera, i.e. it projects points given in the rectified first camera coordinate system into the
  17072. rectified second camera's image.</dd>
  17073. <dd><code>Q</code> - Output \(4 \times 4\) disparity-to-depth mapping matrix (see REF: reprojectImageTo3D).</dd>
  17074. <dd><code>flags</code> - Operation flags that may be zero or REF: CALIB_ZERO_DISPARITY . If the flag is set,
  17075. the function makes the principal points of each camera have the same pixel coordinates in the
  17076. rectified views. And if the flag is not set, the function may still shift the images in the
  17077. horizontal or vertical direction (depending on the orientation of epipolar lines) to maximize the
  17078. useful image area.</dd>
  17079. <dd><code>alpha</code> - Free scaling parameter. If it is -1 or absent, the function performs the default
  17080. scaling. Otherwise, the parameter should be between 0 and 1. alpha=0 means that the rectified
  17081. images are zoomed and shifted so that only valid pixels are visible (no black areas after
  17082. rectification). alpha=1 means that the rectified image is decimated and shifted so that all the
  17083. pixels from the original images from the cameras are retained in the rectified images (no source
  17084. image pixels are lost). Any intermediate value yields an intermediate result between
  17085. those two extreme cases.</dd>
  17086. <dd><code>newImageSize</code> - New image resolution after rectification. The same size should be passed to
  17087. #initUndistortRectifyMap (see the stereo_calib.cpp sample in OpenCV samples directory). When (0,0)
  17088. is passed (default), it is set to the original imageSize . Setting it to a larger value can help you
  17089. preserve details in the original image, especially when there is a big radial distortion.</dd>
<dd><code>validPixROI1</code> - Optional output rectangle inside the first rectified image where all the pixels
are valid. If alpha=0 , the ROI covers the whole image. Otherwise, it is likely to be smaller
(see the picture below).
  17095. The function computes the rotation matrices for each camera that (virtually) make both camera image
  17096. planes the same plane. Consequently, this makes all the epipolar lines parallel and thus simplifies
  17097. the dense stereo correspondence problem. The function takes the matrices computed by #stereoCalibrate
  17098. as input. As output, it provides two rotation matrices and also two projection matrices in the new
  17099. coordinates. The function distinguishes the following two cases:
  17100. <ul>
  17101. <li>
  17102. <b>Horizontal stereo</b>: the first and the second camera views are shifted relative to each other
  17103. mainly along the x-axis (with possible small vertical shift). In the rectified images, the
  17104. corresponding epipolar lines in the left and right cameras are horizontal and have the same
  17105. y-coordinate. P1 and P2 look like:
  17106. </li>
  17107. </ul>
  17108. \(\texttt{P1} = \begin{bmatrix}
  17109. f &amp; 0 &amp; cx_1 &amp; 0 \\
  17110. 0 &amp; f &amp; cy &amp; 0 \\
  17111. 0 &amp; 0 &amp; 1 &amp; 0
  17112. \end{bmatrix}\)
  17113. \(\texttt{P2} = \begin{bmatrix}
  17114. f &amp; 0 &amp; cx_2 &amp; T_x \cdot f \\
  17115. 0 &amp; f &amp; cy &amp; 0 \\
  17116. 0 &amp; 0 &amp; 1 &amp; 0
  17117. \end{bmatrix} ,\)
  17118. \(\texttt{Q} = \begin{bmatrix}
  17119. 1 &amp; 0 &amp; 0 &amp; -cx_1 \\
  17120. 0 &amp; 1 &amp; 0 &amp; -cy \\
  17121. 0 &amp; 0 &amp; 0 &amp; f \\
  17122. 0 &amp; 0 &amp; -\frac{1}{T_x} &amp; \frac{cx_1 - cx_2}{T_x}
  17123. \end{bmatrix} \)
  17124. where \(T_x\) is a horizontal shift between the cameras and \(cx_1=cx_2\) if
  17125. REF: CALIB_ZERO_DISPARITY is set.
  17126. <ul>
  17127. <li>
  17128. <b>Vertical stereo</b>: the first and the second camera views are shifted relative to each other
  17129. mainly in the vertical direction (and probably a bit in the horizontal direction too). The epipolar
  17130. lines in the rectified images are vertical and have the same x-coordinate. P1 and P2 look like:
  17131. </li>
  17132. </ul>
  17133. \(\texttt{P1} = \begin{bmatrix}
  17134. f &amp; 0 &amp; cx &amp; 0 \\
  17135. 0 &amp; f &amp; cy_1 &amp; 0 \\
  17136. 0 &amp; 0 &amp; 1 &amp; 0
  17137. \end{bmatrix}\)
  17138. \(\texttt{P2} = \begin{bmatrix}
  17139. f &amp; 0 &amp; cx &amp; 0 \\
  17140. 0 &amp; f &amp; cy_2 &amp; T_y \cdot f \\
  17141. 0 &amp; 0 &amp; 1 &amp; 0
  17142. \end{bmatrix},\)
  17143. \(\texttt{Q} = \begin{bmatrix}
  17144. 1 &amp; 0 &amp; 0 &amp; -cx \\
  17145. 0 &amp; 1 &amp; 0 &amp; -cy_1 \\
  17146. 0 &amp; 0 &amp; 0 &amp; f \\
  17147. 0 &amp; 0 &amp; -\frac{1}{T_y} &amp; \frac{cy_1 - cy_2}{T_y}
  17148. \end{bmatrix} \)
  17149. where \(T_y\) is a vertical shift between the cameras and \(cy_1=cy_2\) if
  17150. REF: CALIB_ZERO_DISPARITY is set.
  17151. As you can see, the first three columns of P1 and P2 will effectively be the new "rectified" camera
  17152. matrices. The matrices, together with R1 and R2 , can then be passed to #initUndistortRectifyMap to
  17153. initialize the rectification map for each camera.
  17154. See below the screenshot from the stereo_calib.cpp sample. Some red horizontal lines pass through
  17155. the corresponding image regions. This means that the images are well rectified, which is what most
  17156. stereo correspondence algorithms rely on. The green rectangles are roi1 and roi2 . You see that
  17157. their interiors are all valid pixels.
  17158. ![image](pics/stereo_undistort.jpg)</dd>
  17159. </dl>
  17160. </li>
  17161. </ul>
  17162. <a name="stereoRectify-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-int-double-org.opencv.core.Size-org.opencv.core.Rect-org.opencv.core.Rect-">
  17163. <!-- -->
  17164. </a>
  17165. <ul class="blockList">
  17166. <li class="blockList">
  17167. <h4>stereoRectify</h4>
  17168. <pre>public static&nbsp;void&nbsp;stereoRectify(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix1,
  17169. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs1,
  17170. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix2,
  17171. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs2,
  17172. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imageSize,
  17173. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  17174. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;T,
  17175. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R1,
  17176. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R2,
  17177. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P1,
  17178. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P2,
  17179. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;Q,
  17180. int&nbsp;flags,
  17181. double&nbsp;alpha,
  17182. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;newImageSize,
  17183. <a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;validPixROI1,
  17184. <a href="../../../org/opencv/core/Rect.html" title="class in org.opencv.core">Rect</a>&nbsp;validPixROI2)</pre>
  17185. <div class="block">Computes rectification transforms for each head of a calibrated stereo camera.</div>
  17186. <dl>
  17187. <dt><span class="paramLabel">Parameters:</span></dt>
  17188. <dd><code>cameraMatrix1</code> - First camera intrinsic matrix.</dd>
  17189. <dd><code>distCoeffs1</code> - First camera distortion parameters.</dd>
  17190. <dd><code>cameraMatrix2</code> - Second camera intrinsic matrix.</dd>
  17191. <dd><code>distCoeffs2</code> - Second camera distortion parameters.</dd>
  17192. <dd><code>imageSize</code> - Size of the image used for stereo calibration.</dd>
  17193. <dd><code>R</code> - Rotation matrix from the coordinate system of the first camera to the second camera,
  17194. see REF: stereoCalibrate.</dd>
  17195. <dd><code>T</code> - Translation vector from the coordinate system of the first camera to the second camera,
  17196. see REF: stereoCalibrate.</dd>
  17197. <dd><code>R1</code> - Output 3x3 rectification transform (rotation matrix) for the first camera. This matrix
  17198. brings points given in the unrectified first camera's coordinate system to points in the rectified
  17199. first camera's coordinate system. In more technical terms, it performs a change of basis from the
  17200. unrectified first camera's coordinate system to the rectified first camera's coordinate system.</dd>
  17201. <dd><code>R2</code> - Output 3x3 rectification transform (rotation matrix) for the second camera. This matrix
  17202. brings points given in the unrectified second camera's coordinate system to points in the rectified
  17203. second camera's coordinate system. In more technical terms, it performs a change of basis from the
  17204. unrectified second camera's coordinate system to the rectified second camera's coordinate system.</dd>
  17205. <dd><code>P1</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the first
  17206. camera, i.e. it projects points given in the rectified first camera coordinate system into the
  17207. rectified first camera's image.</dd>
  17208. <dd><code>P2</code> - Output 3x4 projection matrix in the new (rectified) coordinate systems for the second
  17209. camera, i.e. it projects points given in the rectified first camera coordinate system into the
  17210. rectified second camera's image.</dd>
  17211. <dd><code>Q</code> - Output \(4 \times 4\) disparity-to-depth mapping matrix (see REF: reprojectImageTo3D).</dd>
  17212. <dd><code>flags</code> - Operation flags that may be zero or REF: CALIB_ZERO_DISPARITY . If the flag is set,
  17213. the function makes the principal points of each camera have the same pixel coordinates in the
  17214. rectified views. And if the flag is not set, the function may still shift the images in the
  17215. horizontal or vertical direction (depending on the orientation of epipolar lines) to maximize the
  17216. useful image area.</dd>
  17217. <dd><code>alpha</code> - Free scaling parameter. If it is -1 or absent, the function performs the default
  17218. scaling. Otherwise, the parameter should be between 0 and 1. alpha=0 means that the rectified
  17219. images are zoomed and shifted so that only valid pixels are visible (no black areas after
  17220. rectification). alpha=1 means that the rectified image is decimated and shifted so that all the
  17221. pixels from the original images from the cameras are retained in the rectified images (no source
  17222. image pixels are lost). Any intermediate value yields an intermediate result between
  17223. those two extreme cases.</dd>
  17224. <dd><code>newImageSize</code> - New image resolution after rectification. The same size should be passed to
  17225. #initUndistortRectifyMap (see the stereo_calib.cpp sample in OpenCV samples directory). When (0,0)
  17226. is passed (default), it is set to the original imageSize . Setting it to a larger value can help you
  17227. preserve details in the original image, especially when there is a big radial distortion.</dd>
<dd><code>validPixROI1</code> - Optional output rectangle inside the first rectified image where all the pixels
are valid. If alpha=0 , the ROI covers the whole image. Otherwise, it is likely to be smaller
(see the picture below).</dd>
<dd><code>validPixROI2</code> - Optional output rectangle inside the second rectified image where all the pixels
are valid. If alpha=0 , the ROI covers the whole image. Otherwise, it is likely to be smaller
(see the picture below).
  17234. The function computes the rotation matrices for each camera that (virtually) make both camera image
  17235. planes the same plane. Consequently, this makes all the epipolar lines parallel and thus simplifies
  17236. the dense stereo correspondence problem. The function takes the matrices computed by #stereoCalibrate
  17237. as input. As output, it provides two rotation matrices and also two projection matrices in the new
  17238. coordinates. The function distinguishes the following two cases:
  17239. <ul>
  17240. <li>
  17241. <b>Horizontal stereo</b>: the first and the second camera views are shifted relative to each other
  17242. mainly along the x-axis (with possible small vertical shift). In the rectified images, the
  17243. corresponding epipolar lines in the left and right cameras are horizontal and have the same
  17244. y-coordinate. P1 and P2 look like:
  17245. </li>
  17246. </ul>
  17247. \(\texttt{P1} = \begin{bmatrix}
  17248. f &amp; 0 &amp; cx_1 &amp; 0 \\
  17249. 0 &amp; f &amp; cy &amp; 0 \\
  17250. 0 &amp; 0 &amp; 1 &amp; 0
  17251. \end{bmatrix}\)
  17252. \(\texttt{P2} = \begin{bmatrix}
  17253. f &amp; 0 &amp; cx_2 &amp; T_x \cdot f \\
  17254. 0 &amp; f &amp; cy &amp; 0 \\
  17255. 0 &amp; 0 &amp; 1 &amp; 0
  17256. \end{bmatrix} ,\)
  17257. \(\texttt{Q} = \begin{bmatrix}
  17258. 1 &amp; 0 &amp; 0 &amp; -cx_1 \\
  17259. 0 &amp; 1 &amp; 0 &amp; -cy \\
  17260. 0 &amp; 0 &amp; 0 &amp; f \\
  17261. 0 &amp; 0 &amp; -\frac{1}{T_x} &amp; \frac{cx_1 - cx_2}{T_x}
  17262. \end{bmatrix} \)
  17263. where \(T_x\) is a horizontal shift between the cameras and \(cx_1=cx_2\) if
  17264. REF: CALIB_ZERO_DISPARITY is set.
  17265. <ul>
  17266. <li>
  17267. <b>Vertical stereo</b>: the first and the second camera views are shifted relative to each other
  17268. mainly in the vertical direction (and probably a bit in the horizontal direction too). The epipolar
  17269. lines in the rectified images are vertical and have the same x-coordinate. P1 and P2 look like:
  17270. </li>
  17271. </ul>
  17272. \(\texttt{P1} = \begin{bmatrix}
  17273. f &amp; 0 &amp; cx &amp; 0 \\
  17274. 0 &amp; f &amp; cy_1 &amp; 0 \\
  17275. 0 &amp; 0 &amp; 1 &amp; 0
  17276. \end{bmatrix}\)
  17277. \(\texttt{P2} = \begin{bmatrix}
  17278. f &amp; 0 &amp; cx &amp; 0 \\
  17279. 0 &amp; f &amp; cy_2 &amp; T_y \cdot f \\
  17280. 0 &amp; 0 &amp; 1 &amp; 0
  17281. \end{bmatrix},\)
  17282. \(\texttt{Q} = \begin{bmatrix}
  17283. 1 &amp; 0 &amp; 0 &amp; -cx \\
  17284. 0 &amp; 1 &amp; 0 &amp; -cy_1 \\
  17285. 0 &amp; 0 &amp; 0 &amp; f \\
  17286. 0 &amp; 0 &amp; -\frac{1}{T_y} &amp; \frac{cy_1 - cy_2}{T_y}
  17287. \end{bmatrix} \)
  17288. where \(T_y\) is a vertical shift between the cameras and \(cy_1=cy_2\) if
  17289. REF: CALIB_ZERO_DISPARITY is set.
  17290. As you can see, the first three columns of P1 and P2 will effectively be the new "rectified" camera
  17291. matrices. The matrices, together with R1 and R2 , can then be passed to #initUndistortRectifyMap to
  17292. initialize the rectification map for each camera.
  17293. See below the screenshot from the stereo_calib.cpp sample. Some red horizontal lines pass through
  17294. the corresponding image regions. This means that the images are well rectified, which is what most
  17295. stereo correspondence algorithms rely on. The green rectangles are roi1 and roi2 . You see that
  17296. their interiors are all valid pixels.
  17297. ![image](pics/stereo_undistort.jpg)</dd>
  17298. </dl>
  17299. </li>
  17300. </ul>
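As the description above notes, P1 and P2, together with R1 and R2, are normally passed on to #initUndistortRectifyMap. The following sketch is an editorial addition under the assumption that the calibration inputs (cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2, R, T, imageSize) come from an earlier #stereoCalibrate run and that img1/img2 are the raw camera frames; it is written against the OpenCV 4.x Java bindings, where these functions live in Calib3d.
<pre>// Assumed imports: org.opencv.core.*, org.opencv.calib3d.Calib3d, org.opencv.imgproc.Imgproc
Mat R1 = new Mat(), R2 = new Mat(), P1 = new Mat(), P2 = new Mat(), Q = new Mat();
Rect roi1 = new Rect(), roi2 = new Rect();
Calib3d.stereoRectify(cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2,
        imageSize, R, T, R1, R2, P1, P2, Q,
        Calib3d.CALIB_ZERO_DISPARITY, -1, imageSize, roi1, roi2);

// Build the per-camera rectification maps and warp both images onto the common plane.
Mat map1x = new Mat(), map1y = new Mat(), map2x = new Mat(), map2y = new Mat();
Calib3d.initUndistortRectifyMap(cameraMatrix1, distCoeffs1, R1, P1, imageSize,
        CvType.CV_32FC1, map1x, map1y);
Calib3d.initUndistortRectifyMap(cameraMatrix2, distCoeffs2, R2, P2, imageSize,
        CvType.CV_32FC1, map2x, map2y);
Mat rect1 = new Mat(), rect2 = new Mat();
Imgproc.remap(img1, rect1, map1x, map1y, Imgproc.INTER_LINEAR);
Imgproc.remap(img2, rect2, map2x, map2y, Imgproc.INTER_LINEAR);</pre>
With CALIB_ZERO_DISPARITY set, corresponding rows (or columns, for a vertical rig) of rect1 and rect2 line up, which is what dense stereo correspondence algorithms expect.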
  17301. <a name="stereoRectifyUncalibrated-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-">
  17302. <!-- -->
  17303. </a>
  17304. <ul class="blockList">
  17305. <li class="blockList">
  17306. <h4>stereoRectifyUncalibrated</h4>
  17307. <pre>public static&nbsp;boolean&nbsp;stereoRectifyUncalibrated(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  17308. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  17309. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  17310. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imgSize,
  17311. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;H1,
  17312. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;H2)</pre>
  17313. <div class="block">Computes a rectification transform for an uncalibrated stereo camera.</div>
  17314. <dl>
  17315. <dt><span class="paramLabel">Parameters:</span></dt>
  17316. <dd><code>points1</code> - Array of feature points in the first image.</dd>
  17317. <dd><code>points2</code> - The corresponding points in the second image. The same formats as in
  17318. #findFundamentalMat are supported.</dd>
  17319. <dd><code>F</code> - Input fundamental matrix. It can be computed from the same set of point pairs using
  17320. #findFundamentalMat .</dd>
  17321. <dd><code>imgSize</code> - Size of the image.</dd>
  17322. <dd><code>H1</code> - Output rectification homography matrix for the first image.</dd>
  17323. <dd><code>H2</code> - Output rectification homography matrix for the second image.
The function computes the rectification transformations without knowing the intrinsic parameters of the
cameras and their relative position in space, which explains the suffix "uncalibrated". Another
  17329. related difference from #stereoRectify is that the function outputs not the rectification
  17330. transformations in the object (3D) space, but the planar perspective transformations encoded by the
  17331. homography matrices H1 and H2 . The function implements the algorithm CITE: Hartley99 .
  17332. <b>Note:</b>
  17333. While the algorithm does not need to know the intrinsic parameters of the cameras, it heavily
  17334. depends on the epipolar geometry. Therefore, if the camera lenses have a significant distortion,
  17335. it would be better to correct it before computing the fundamental matrix and calling this
function. For example, distortion coefficients can be estimated for each head of the stereo camera
separately by using #calibrateCamera . Then, the images can be corrected using #undistort , or
  17338. just the point coordinates can be corrected with #undistortPoints .</dd>
  17339. <dt><span class="returnLabel">Returns:</span></dt>
  17340. <dd>automatically generated</dd>
  17341. </dl>
  17342. </li>
  17343. </ul>
  17344. <a name="stereoRectifyUncalibrated-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Size-org.opencv.core.Mat-org.opencv.core.Mat-double-">
  17345. <!-- -->
  17346. </a>
  17347. <ul class="blockList">
  17348. <li class="blockList">
  17349. <h4>stereoRectifyUncalibrated</h4>
  17350. <pre>public static&nbsp;boolean&nbsp;stereoRectifyUncalibrated(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points1,
  17351. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points2,
  17352. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;F,
  17353. <a href="../../../org/opencv/core/Size.html" title="class in org.opencv.core">Size</a>&nbsp;imgSize,
  17354. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;H1,
  17355. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;H2,
  17356. double&nbsp;threshold)</pre>
  17357. <div class="block">Computes a rectification transform for an uncalibrated stereo camera.</div>
  17358. <dl>
  17359. <dt><span class="paramLabel">Parameters:</span></dt>
  17360. <dd><code>points1</code> - Array of feature points in the first image.</dd>
  17361. <dd><code>points2</code> - The corresponding points in the second image. The same formats as in
  17362. #findFundamentalMat are supported.</dd>
  17363. <dd><code>F</code> - Input fundamental matrix. It can be computed from the same set of point pairs using
  17364. #findFundamentalMat .</dd>
  17365. <dd><code>imgSize</code> - Size of the image.</dd>
  17366. <dd><code>H1</code> - Output rectification homography matrix for the first image.</dd>
  17367. <dd><code>H2</code> - Output rectification homography matrix for the second image.</dd>
  17368. <dd><code>threshold</code> - Optional threshold used to filter out the outliers. If the parameter is greater
  17369. than zero, all the point pairs that do not comply with the epipolar geometry (that is, the points
  17370. for which \(|\texttt{points2[i]}^T \cdot \texttt{F} \cdot \texttt{points1[i]}|&gt;\texttt{threshold}\) )
  17371. are rejected prior to computing the homographies. Otherwise, all the points are considered inliers.
The function computes the rectification transformations without knowing the intrinsic parameters of the
cameras and their relative position in space, which explains the suffix "uncalibrated". Another
  17374. related difference from #stereoRectify is that the function outputs not the rectification
  17375. transformations in the object (3D) space, but the planar perspective transformations encoded by the
  17376. homography matrices H1 and H2 . The function implements the algorithm CITE: Hartley99 .
  17377. <b>Note:</b>
  17378. While the algorithm does not need to know the intrinsic parameters of the cameras, it heavily
  17379. depends on the epipolar geometry. Therefore, if the camera lenses have a significant distortion,
  17380. it would be better to correct it before computing the fundamental matrix and calling this
function. For example, distortion coefficients can be estimated for each head of the stereo camera
separately by using #calibrateCamera . Then, the images can be corrected using #undistort , or
  17383. just the point coordinates can be corrected with #undistortPoints .</dd>
  17384. <dt><span class="returnLabel">Returns:</span></dt>
  17385. <dd>automatically generated</dd>
  17386. </dl>
  17387. </li>
  17388. </ul>
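A minimal editorial sketch of the pipeline described above, assuming points1 and points2 are matched MatOfPoint2f point sets and img1/img2 are the corresponding images; the RANSAC settings and the 5-pixel threshold are illustrative only.
<pre>// Assumed imports: org.opencv.core.*, org.opencv.calib3d.Calib3d, org.opencv.imgproc.Imgproc
// Estimate F from the matches, derive the rectifying homographies, then warp both images.
Mat F = Calib3d.findFundamentalMat(points1, points2, Calib3d.FM_RANSAC, 3.0, 0.99);
Mat H1 = new Mat(), H2 = new Mat();
boolean ok = Calib3d.stereoRectifyUncalibrated(points1, points2, F, img1.size(), H1, H2, 5.0);
if (ok) {
    Mat rect1 = new Mat(), rect2 = new Mat();
    Imgproc.warpPerspective(img1, rect1, H1, img1.size());
    Imgproc.warpPerspective(img2, rect2, H2, img2.size());
}</pre>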
  17389. <a name="triangulatePoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  17390. <!-- -->
  17391. </a>
  17392. <ul class="blockList">
  17393. <li class="blockList">
  17394. <h4>triangulatePoints</h4>
  17395. <pre>public static&nbsp;void&nbsp;triangulatePoints(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projMatr1,
  17396. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projMatr2,
  17397. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projPoints1,
  17398. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;projPoints2,
  17399. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;points4D)</pre>
  17400. <div class="block">This function reconstructs 3-dimensional points (in homogeneous coordinates) by using
  17401. their observations with a stereo camera.</div>
  17402. <dl>
  17403. <dt><span class="paramLabel">Parameters:</span></dt>
  17404. <dd><code>projMatr1</code> - 3x4 projection matrix of the first camera, i.e. this matrix projects 3D points
  17405. given in the world's coordinate system into the first image.</dd>
  17406. <dd><code>projMatr2</code> - 3x4 projection matrix of the second camera, i.e. this matrix projects 3D points
  17407. given in the world's coordinate system into the second image.</dd>
<dd><code>projPoints1</code> - 2xN array of feature points in the first image. In the case of the C++ version,
it can also be a vector of feature points or a two-channel matrix of size 1xN or Nx1.</dd>
<dd><code>projPoints2</code> - 2xN array of corresponding points in the second image. In the case of the C++
version, it can also be a vector of feature points or a two-channel matrix of size 1xN or Nx1.</dd>
  17412. <dd><code>points4D</code> - 4xN array of reconstructed points in homogeneous coordinates. These points are
  17413. returned in the world's coordinate system.
  17414. <b>Note:</b>
  17415. Keep in mind that all input data should be of float type in order for this function to work.
  17416. <b>Note:</b>
  17417. If the projection matrices from REF: stereoRectify are used, then the returned points are
  17418. represented in the first camera's rectified coordinate system.
  17419. SEE:
  17420. reprojectImageTo3D</dd>
  17421. </dl>
  17422. </li>
  17423. </ul>
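The following editorial sketch assumes P1 and P2 are the rectified projection matrices from #stereoRectify and pts1/pts2 are 2xN CV_32F matrices of matching pixel coordinates; it shows one way to convert the homogeneous output to Euclidean 3D points.
<pre>// Assumed imports: org.opencv.core.*, org.opencv.calib3d.Calib3d
Mat points4D = new Mat();
Calib3d.triangulatePoints(P1, P2, pts1, pts2, points4D);   // 4xN homogeneous points

// Dehomogenize: divide the x, y, z entries of every column by that column's w.
Mat points3D = new Mat(3, points4D.cols(), CvType.CV_32F);
for (int i = 0; i &lt; points4D.cols(); i++) {
    double w = points4D.get(3, i)[0];
    for (int r = 0; r &lt; 3; r++) {
        points3D.put(r, i, points4D.get(r, i)[0] / w);
    }
}</pre>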
  17424. <a name="undistort-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  17425. <!-- -->
  17426. </a>
  17427. <ul class="blockList">
  17428. <li class="blockList">
  17429. <h4>undistort</h4>
  17430. <pre>public static&nbsp;void&nbsp;undistort(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  17431. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  17432. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  17433. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs)</pre>
  17434. <div class="block">Transforms an image to compensate for lens distortion.
The function transforms an image to compensate for radial and tangential lens distortion.
  17436. The function is simply a combination of #initUndistortRectifyMap (with unity R ) and #remap
  17437. (with bilinear interpolation). See the former function for details of the transformation being
  17438. performed.
Pixels in the destination image for which there are no corresponding pixels in the source
image are filled with zeros (black color).
  17441. A particular subset of the source image that will be visible in the corrected image can be regulated
  17442. by newCameraMatrix. You can use #getOptimalNewCameraMatrix to compute the appropriate
  17443. newCameraMatrix depending on your requirements.
  17444. The camera matrix and the distortion parameters can be determined using #calibrateCamera. If
  17445. the resolution of images is different from the resolution used at the calibration stage, \(f_x,
  17446. f_y, c_x\) and \(c_y\) need to be scaled accordingly, while the distortion coefficients remain
  17447. the same.</div>
  17448. <dl>
  17449. <dt><span class="paramLabel">Parameters:</span></dt>
  17450. <dd><code>src</code> - Input (distorted) image.</dd>
  17451. <dd><code>dst</code> - Output (corrected) image that has the same size and type as src .</dd>
  17452. <dd><code>cameraMatrix</code> - Input camera matrix \(A = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .</dd>
  17453. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  17454. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.</dd>
  17457. </dl>
  17458. </li>
  17459. </ul>
  17460. <a name="undistort-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  17461. <!-- -->
  17462. </a>
  17463. <ul class="blockList">
  17464. <li class="blockList">
  17465. <h4>undistort</h4>
  17466. <pre>public static&nbsp;void&nbsp;undistort(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  17467. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  17468. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  17469. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  17470. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;newCameraMatrix)</pre>
  17471. <div class="block">Transforms an image to compensate for lens distortion.
The function transforms an image to compensate for radial and tangential lens distortion.
  17473. The function is simply a combination of #initUndistortRectifyMap (with unity R ) and #remap
  17474. (with bilinear interpolation). See the former function for details of the transformation being
  17475. performed.
Pixels in the destination image for which there are no corresponding pixels in the source
image are filled with zeros (black color).
  17478. A particular subset of the source image that will be visible in the corrected image can be regulated
  17479. by newCameraMatrix. You can use #getOptimalNewCameraMatrix to compute the appropriate
  17480. newCameraMatrix depending on your requirements.
  17481. The camera matrix and the distortion parameters can be determined using #calibrateCamera. If
  17482. the resolution of images is different from the resolution used at the calibration stage, \(f_x,
  17483. f_y, c_x\) and \(c_y\) need to be scaled accordingly, while the distortion coefficients remain
  17484. the same.</div>
  17485. <dl>
  17486. <dt><span class="paramLabel">Parameters:</span></dt>
  17487. <dd><code>src</code> - Input (distorted) image.</dd>
  17488. <dd><code>dst</code> - Output (corrected) image that has the same size and type as src .</dd>
  17489. <dd><code>cameraMatrix</code> - Input camera matrix \(A = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .</dd>
  17490. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  17491. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
  17492. of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.</dd>
  17493. <dd><code>newCameraMatrix</code> - Camera matrix of the distorted image. By default, it is the same as
  17494. cameraMatrix but you may additionally scale and shift the result by using a different matrix.</dd>
  17495. </dl>
  17496. </li>
  17497. </ul>
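<p>A sketch of the pattern described above, computing a new camera matrix with
#getOptimalNewCameraMatrix and passing it to this overload; the alpha value is illustrative, and
cameraMatrix/distCoeffs are assumed to come from a prior #calibrateCamera run:</p>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;
import org.opencv.core.Size;

public class UndistortWithNewCameraMatrix {
    // cameraMatrix and distCoeffs are assumed to come from calibration;
    // src is the distorted input image.
    public static Mat undistortKeepingAllPixels(Mat src, Mat cameraMatrix, Mat distCoeffs) {
        Size imageSize = src.size();

        // alpha = 1 keeps all source pixels (possibly with black borders);
        // alpha = 0 would crop to only valid pixels.
        Mat newCameraMatrix = Calib3d.getOptimalNewCameraMatrix(
                cameraMatrix, distCoeffs, imageSize, 1.0);

        Mat dst = new Mat();
        Calib3d.undistort(src, dst, cameraMatrix, distCoeffs, newCameraMatrix);
        return dst;
    }
}
</pre>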
  17498. <a name="undistortImagePoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  17499. <!-- -->
  17500. </a>
  17501. <ul class="blockList">
  17502. <li class="blockList">
  17503. <h4>undistortImagePoints</h4>
  17504. <pre>public static&nbsp;void&nbsp;undistortImagePoints(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  17505. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  17506. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  17507. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs)</pre>
<div class="block">Computes the positions of undistorted image points. A usage sketch follows this entry.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>src</code> - Observed point positions, 2xN/Nx2 1-channel or 1xN/Nx1 2-channel (CV_32FC2 or
CV_64FC2) (or vector&lt;Point2f&gt;).</dd>
<dd><code>dst</code> - Output undistorted point positions (1xN/Nx1 2-channel or vector&lt;Point2f&gt;).</dd>
<dd><code>cameraMatrix</code> - Camera matrix \(\vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .</dd>
<dd><code>distCoeffs</code> - Distortion coefficients.</dd>
  17516. </dl>
  17517. </li>
  17518. </ul>
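<p>A minimal sketch for this method, assuming cameraMatrix and distCoeffs from a prior calibration;
the pixel coordinates below are made up for illustration:</p>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;

public class UndistortImagePointsExample {
    public static Mat undistortDetections(Mat cameraMatrix, Mat distCoeffs) {
        // Observed (distorted) pixel positions, stored as a 1xN 2-channel matrix.
        MatOfPoint2f observed = new MatOfPoint2f(
                new Point(100.5, 200.25),
                new Point(320.0, 240.0));

        Mat undistorted = new Mat();
        // Output positions are undistorted image points, i.e. pixel coordinates
        // of the same camera (undistortPoints, by contrast, returns normalized
        // coordinates when no new camera matrix is given).
        Calib3d.undistortImagePoints(observed, undistorted, cameraMatrix, distCoeffs);
        return undistorted;
    }
}
</pre>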
  17519. <a name="undistortImagePoints-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.TermCriteria-">
  17520. <!-- -->
  17521. </a>
  17522. <ul class="blockList">
  17523. <li class="blockList">
  17524. <h4>undistortImagePoints</h4>
  17525. <pre>public static&nbsp;void&nbsp;undistortImagePoints(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  17526. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  17527. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  17528. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  17529. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;arg1)</pre>
<div class="block">Computes the positions of undistorted image points, with explicit termination
criteria for the iterative undistortion. A usage sketch follows this entry.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>src</code> - Observed point positions, 2xN/Nx2 1-channel or 1xN/Nx1 2-channel (CV_32FC2 or
CV_64FC2) (or vector&lt;Point2f&gt;).</dd>
<dd><code>dst</code> - Output undistorted point positions (1xN/Nx1 2-channel or vector&lt;Point2f&gt;).</dd>
<dd><code>cameraMatrix</code> - Camera matrix \(\vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .</dd>
<dd><code>distCoeffs</code> - Distortion coefficients.</dd>
<dd><code>arg1</code> - Termination criteria for the iterative point undistortion (parameter name is automatically generated).</dd>
  17539. </dl>
  17540. </li>
  17541. </ul>
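<p>The same call with an explicit termination criterion for the iterative undistortion; the
iteration count and epsilon below are illustrative, not recommended defaults:</p>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.TermCriteria;

public class UndistortImagePointsWithCriteria {
    public static Mat run(Mat cameraMatrix, Mat distCoeffs) {
        MatOfPoint2f observed = new MatOfPoint2f(new Point(412.0, 97.5));
        Mat undistorted = new Mat();

        // Stop after 10 iterations or when the update falls below 1e-4.
        TermCriteria criteria =
                new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 10, 1e-4);

        Calib3d.undistortImagePoints(observed, undistorted, cameraMatrix, distCoeffs, criteria);
        return undistorted;
    }
}
</pre>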
  17542. <a name="undistortPoints-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.Mat-">
  17543. <!-- -->
  17544. </a>
  17545. <ul class="blockList">
  17546. <li class="blockList">
  17547. <h4>undistortPoints</h4>
  17548. <pre>public static&nbsp;void&nbsp;undistortPoints(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;src,
  17549. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dst,
  17550. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  17551. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs)</pre>
  17552. <div class="block">Computes the ideal point coordinates from the observed point coordinates.
The function is similar to #undistort and #initUndistortRectifyMap, but it operates on a
sparse set of points instead of a raster image. It also performs the transformation that is the
reverse of #projectPoints. For a 3D object it does not reconstruct the 3D coordinates, but for a
planar object it does, up to a translation vector, if the proper R is specified.
  17557. For each observed point coordinate \((u, v)\) the function computes:
  17558. \(
  17559. \begin{array}{l}
  17560. x^{"} \leftarrow (u - c_x)/f_x \\
  17561. y^{"} \leftarrow (v - c_y)/f_y \\
  17562. (x',y') = undistort(x^{"},y^{"}, \texttt{distCoeffs}) \\
  17563. {[X\,Y\,W]} ^T \leftarrow R*[x' \, y' \, 1]^T \\
  17564. x \leftarrow X/W \\
  17565. y \leftarrow Y/W \\
  17566. \text{only performed if P is specified:} \\
  17567. u' \leftarrow x {f'}_x + {c'}_x \\
  17568. v' \leftarrow y {f'}_y + {c'}_y
  17569. \end{array}
  17570. \)
  17571. where *undistort* is an approximate iterative algorithm that estimates the normalized original
  17572. point coordinates out of the normalized distorted point coordinates ("normalized" means that the
  17573. coordinates do not depend on the camera matrix).
The function can be used for either a stereo camera head or a monocular camera (when R is empty).</div>
  17575. <dl>
  17576. <dt><span class="paramLabel">Parameters:</span></dt>
  17577. <dd><code>src</code> - Observed point coordinates, 2xN/Nx2 1-channel or 1xN/Nx1 2-channel (CV_32FC2 or CV_64FC2) (or
  17578. vector&lt;Point2f&gt; ).</dd>
  17579. <dd><code>dst</code> - Output ideal point coordinates (1xN/Nx1 2-channel or vector&lt;Point2f&gt; ) after undistortion and reverse perspective
  17580. transformation. If matrix P is identity or omitted, dst will contain normalized point coordinates.</dd>
  17581. <dd><code>cameraMatrix</code> - Camera matrix \(\vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .</dd>
<dd><code>distCoeffs</code> - Input vector of distortion coefficients
\((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, zero distortion coefficients are assumed.
In this overload no rectification transformation R and no new camera matrix P are passed, so the
identity transformation is used and dst contains normalized point coordinates.</dd>
  17587. </dl>
  17588. </li>
  17589. </ul>
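<p>A sketch for this overload; with no R and P the output is in normalized coordinates, as noted
above. cameraMatrix and distCoeffs are assumed to come from a prior calibration:</p>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;

public class UndistortPointsExample {
    public static MatOfPoint2f toNormalized(MatOfPoint2f observed, Mat cameraMatrix, Mat distCoeffs) {
        MatOfPoint2f ideal = new MatOfPoint2f();
        // No R/P given: the result is (x, y) in normalized coordinates,
        // independent of the camera matrix.
        Calib3d.undistortPoints(observed, ideal, cameraMatrix, distCoeffs);
        return ideal;
    }
}
</pre>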
  17590. <a name="undistortPoints-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  17591. <!-- -->
  17592. </a>
  17593. <ul class="blockList">
  17594. <li class="blockList">
  17595. <h4>undistortPoints</h4>
  17596. <pre>public static&nbsp;void&nbsp;undistortPoints(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;src,
  17597. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dst,
  17598. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  17599. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  17600. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R)</pre>
  17601. <div class="block">Computes the ideal point coordinates from the observed point coordinates.
The function is similar to #undistort and #initUndistortRectifyMap, but it operates on a
sparse set of points instead of a raster image. It also performs the transformation that is the
reverse of #projectPoints. For a 3D object it does not reconstruct the 3D coordinates, but for a
planar object it does, up to a translation vector, if the proper R is specified.
  17606. For each observed point coordinate \((u, v)\) the function computes:
  17607. \(
  17608. \begin{array}{l}
  17609. x^{"} \leftarrow (u - c_x)/f_x \\
  17610. y^{"} \leftarrow (v - c_y)/f_y \\
  17611. (x',y') = undistort(x^{"},y^{"}, \texttt{distCoeffs}) \\
  17612. {[X\,Y\,W]} ^T \leftarrow R*[x' \, y' \, 1]^T \\
  17613. x \leftarrow X/W \\
  17614. y \leftarrow Y/W \\
  17615. \text{only performed if P is specified:} \\
  17616. u' \leftarrow x {f'}_x + {c'}_x \\
  17617. v' \leftarrow y {f'}_y + {c'}_y
  17618. \end{array}
  17619. \)
  17620. where *undistort* is an approximate iterative algorithm that estimates the normalized original
  17621. point coordinates out of the normalized distorted point coordinates ("normalized" means that the
  17622. coordinates do not depend on the camera matrix).
The function can be used for either a stereo camera head or a monocular camera (when R is empty).</div>
  17624. <dl>
  17625. <dt><span class="paramLabel">Parameters:</span></dt>
  17626. <dd><code>src</code> - Observed point coordinates, 2xN/Nx2 1-channel or 1xN/Nx1 2-channel (CV_32FC2 or CV_64FC2) (or
  17627. vector&lt;Point2f&gt; ).</dd>
  17628. <dd><code>dst</code> - Output ideal point coordinates (1xN/Nx1 2-channel or vector&lt;Point2f&gt; ) after undistortion and reverse perspective
  17629. transformation. If matrix P is identity or omitted, dst will contain normalized point coordinates.</dd>
  17630. <dd><code>cameraMatrix</code> - Camera matrix \(\vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .</dd>
  17631. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  17632. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, zero distortion coefficients are assumed.</dd>
<dd><code>R</code> - Rectification transformation in the object space (3x3 matrix). R1 or R2 computed by
#stereoRectify can be passed here. If the matrix is empty, the identity transformation is used.
In this overload no new camera matrix P is passed, so the identity new camera matrix is used and
dst contains normalized point coordinates.</dd>
  17637. </dl>
  17638. </li>
  17639. </ul>
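<p>A sketch for the overload with a rectification transformation R, e.g. R1 returned by
#stereoRectify for the first camera; the variable names are illustrative:</p>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;

public class UndistortPointsWithR {
    // R1 is assumed to be the 3x3 rectification transform for camera 1,
    // e.g. as produced by Calib3d.stereoRectify.
    public static MatOfPoint2f rectifyNormalized(MatOfPoint2f observed,
                                                 Mat cameraMatrix1, Mat distCoeffs1, Mat R1) {
        MatOfPoint2f rectified = new MatOfPoint2f();
        // Without P the result stays in normalized coordinates, but rotated into
        // the rectified frame by R1.
        Calib3d.undistortPoints(observed, rectified, cameraMatrix1, distCoeffs1, R1);
        return rectified;
    }
}
</pre>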
  17640. <a name="undistortPoints-org.opencv.core.MatOfPoint2f-org.opencv.core.MatOfPoint2f-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-">
  17641. <!-- -->
  17642. </a>
  17643. <ul class="blockList">
  17644. <li class="blockList">
  17645. <h4>undistortPoints</h4>
  17646. <pre>public static&nbsp;void&nbsp;undistortPoints(<a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;src,
  17647. <a href="../../../org/opencv/core/MatOfPoint2f.html" title="class in org.opencv.core">MatOfPoint2f</a>&nbsp;dst,
  17648. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  17649. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  17650. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  17651. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P)</pre>
  17652. <div class="block">Computes the ideal point coordinates from the observed point coordinates.
The function is similar to #undistort and #initUndistortRectifyMap, but it operates on a
sparse set of points instead of a raster image. It also performs the transformation that is the
reverse of #projectPoints. For a 3D object it does not reconstruct the 3D coordinates, but for a
planar object it does, up to a translation vector, if the proper R is specified.
  17657. For each observed point coordinate \((u, v)\) the function computes:
  17658. \(
  17659. \begin{array}{l}
  17660. x^{"} \leftarrow (u - c_x)/f_x \\
  17661. y^{"} \leftarrow (v - c_y)/f_y \\
  17662. (x',y') = undistort(x^{"},y^{"}, \texttt{distCoeffs}) \\
  17663. {[X\,Y\,W]} ^T \leftarrow R*[x' \, y' \, 1]^T \\
  17664. x \leftarrow X/W \\
  17665. y \leftarrow Y/W \\
  17666. \text{only performed if P is specified:} \\
  17667. u' \leftarrow x {f'}_x + {c'}_x \\
  17668. v' \leftarrow y {f'}_y + {c'}_y
  17669. \end{array}
  17670. \)
  17671. where *undistort* is an approximate iterative algorithm that estimates the normalized original
  17672. point coordinates out of the normalized distorted point coordinates ("normalized" means that the
  17673. coordinates do not depend on the camera matrix).
The function can be used for either a stereo camera head or a monocular camera (when R is empty).</div>
  17675. <dl>
  17676. <dt><span class="paramLabel">Parameters:</span></dt>
  17677. <dd><code>src</code> - Observed point coordinates, 2xN/Nx2 1-channel or 1xN/Nx1 2-channel (CV_32FC2 or CV_64FC2) (or
  17678. vector&lt;Point2f&gt; ).</dd>
  17679. <dd><code>dst</code> - Output ideal point coordinates (1xN/Nx1 2-channel or vector&lt;Point2f&gt; ) after undistortion and reverse perspective
  17680. transformation. If matrix P is identity or omitted, dst will contain normalized point coordinates.</dd>
  17681. <dd><code>cameraMatrix</code> - Camera matrix \(\vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\) .</dd>
  17682. <dd><code>distCoeffs</code> - Input vector of distortion coefficients
  17683. \((k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\)
of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, zero distortion coefficients are assumed.</dd>
  17685. <dd><code>R</code> - Rectification transformation in the object space (3x3 matrix). R1 or R2 computed by
  17686. #stereoRectify can be passed here. If the matrix is empty, the identity transformation is used.</dd>
  17687. <dd><code>P</code> - New camera matrix (3x3) or new projection matrix (3x4) \(\begin{bmatrix} {f'}_x &amp; 0 &amp; {c'}_x &amp; t_x \\ 0 &amp; {f'}_y &amp; {c'}_y &amp; t_y \\ 0 &amp; 0 &amp; 1 &amp; t_z \end{bmatrix}\). P1 or P2 computed by
  17688. #stereoRectify can be passed here. If the matrix is empty, the identity new camera matrix is used.</dd>
  17689. </dl>
  17690. </li>
  17691. </ul>
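<p>A sketch of the stereo use case described above, mapping observed points from the first camera
into rectified pixel coordinates using R1 and P1 from #stereoRectify; all inputs are assumed to be
available from an earlier stereo calibration:</p>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;

public class UndistortPointsRectified {
    public static MatOfPoint2f toRectifiedPixels(MatOfPoint2f observed,
                                                 Mat cameraMatrix1, Mat distCoeffs1,
                                                 Mat R1, Mat P1) {
        MatOfPoint2f rectified = new MatOfPoint2f();
        // With both R and P specified, the last two lines of the formula above are
        // applied, so the output is in pixel coordinates of the rectified camera.
        Calib3d.undistortPoints(observed, rectified, cameraMatrix1, distCoeffs1, R1, P1);
        return rectified;
    }
}
</pre>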
  17692. <a name="undistortPointsIter-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.Mat-org.opencv.core.TermCriteria-">
  17693. <!-- -->
  17694. </a>
  17695. <ul class="blockList">
  17696. <li class="blockList">
  17697. <h4>undistortPointsIter</h4>
  17698. <pre>public static&nbsp;void&nbsp;undistortPointsIter(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;src,
  17699. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;dst,
  17700. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cameraMatrix,
  17701. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;distCoeffs,
  17702. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;R,
  17703. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;P,
  17704. <a href="../../../org/opencv/core/TermCriteria.html" title="class in org.opencv.core">TermCriteria</a>&nbsp;criteria)</pre>
<div class="block"><b>Note:</b> The default version of #undistortPoints does 5 iterations to compute
the undistorted points. This variant accepts explicit termination criteria; a usage sketch follows
this entry.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>src</code> - Observed point coordinates, as for #undistortPoints.</dd>
<dd><code>dst</code> - Output ideal point coordinates, as for #undistortPoints.</dd>
<dd><code>cameraMatrix</code> - Camera matrix, as for #undistortPoints.</dd>
<dd><code>distCoeffs</code> - Input vector of distortion coefficients, as for #undistortPoints.</dd>
<dd><code>R</code> - Rectification transformation in the object space, as for #undistortPoints.</dd>
<dd><code>P</code> - New camera matrix (3x3) or new projection matrix (3x4), as for #undistortPoints.</dd>
<dd><code>criteria</code> - Termination criteria for the iterative point undistortion algorithm.</dd>
  17715. </dl>
  17716. </li>
  17717. </ul>
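<p>A sketch for #undistortPointsIter with an explicit iteration budget; empty R and P are passed so
that only the termination criteria differ from the default #undistortPoints call, and the criteria
values are illustrative:</p>
<pre>
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.TermCriteria;

public class UndistortPointsIterExample {
    public static Mat run(MatOfPoint2f observed, Mat cameraMatrix, Mat distCoeffs) {
        Mat ideal = new Mat();
        Mat R = new Mat();  // empty: identity rectification
        Mat P = new Mat();  // empty: identity new camera matrix, normalized output

        // Allow up to 20 iterations instead of the default 5.
        TermCriteria criteria =
                new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 20, 1e-6);

        Calib3d.undistortPointsIter(observed, ideal, cameraMatrix, distCoeffs, R, P, criteria);
        return ideal;
    }
}
</pre>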
  17718. <a name="validateDisparity-org.opencv.core.Mat-org.opencv.core.Mat-int-int-">
  17719. <!-- -->
  17720. </a>
  17721. <ul class="blockList">
  17722. <li class="blockList">
  17723. <h4>validateDisparity</h4>
  17724. <pre>public static&nbsp;void&nbsp;validateDisparity(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;disparity,
  17725. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cost,
  17726. int&nbsp;minDisparity,
  17727. int&nbsp;numberOfDisparities)</pre>
  17728. </li>
  17729. </ul>
  17730. <a name="validateDisparity-org.opencv.core.Mat-org.opencv.core.Mat-int-int-int-">
  17731. <!-- -->
  17732. </a>
  17733. <ul class="blockListLast">
  17734. <li class="blockList">
  17735. <h4>validateDisparity</h4>
  17736. <pre>public static&nbsp;void&nbsp;validateDisparity(<a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;disparity,
  17737. <a href="../../../org/opencv/core/Mat.html" title="class in org.opencv.core">Mat</a>&nbsp;cost,
  17738. int&nbsp;minDisparity,
  17739. int&nbsp;numberOfDisparities,
  17740. int&nbsp;disp12MaxDisp)</pre>
  17741. </li>
  17742. </ul>
  17743. </li>
  17744. </ul>
  17745. </li>
  17746. </ul>
  17747. </div>
  17748. </div>
  17749. <!-- ========= END OF CLASS DATA ========= -->
  17820. <p class="legalCopy"><small>Generated on 2023-06-28 12:47:21 / OpenCV 4.8.0</small></p>
  17821. </body>
  17822. </html>