tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

flow_estimation.h (3175B)


      1 /*
      2 * Copyright (c) 2016, Alliance for Open Media. All rights reserved.
      3 *
      4 * This source code is subject to the terms of the BSD 2 Clause License and
      5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
      6 * was not distributed with this source code in the LICENSE file, you can
      7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
      8 * Media Patent License 1.0 was not distributed with this source code in the
      9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
     10 */
     11 
     12 #ifndef AOM_AOM_DSP_FLOW_ESTIMATION_H_
     13 #define AOM_AOM_DSP_FLOW_ESTIMATION_H_
     14 
     15 #include "aom_dsp/pyramid.h"
     16 #include "aom_dsp/flow_estimation/corner_detect.h"
     17 #include "aom_ports/mem.h"
     18 #include "aom_scale/yv12config.h"
     19 
     20 #ifdef __cplusplus
     21 extern "C" {
     22 #endif
     23 
// Maximum number of parameters any motion model can have (AFFINE uses 6;
// see trans_model_params below). Sizes MotionModel::params.
#define MAX_PARAMDIM 6
// NOTE(review): presumably the minimum fraction of correspondences that must
// be inliers for a fitted model to be accepted — confirm against the RANSAC
// implementation that consumes this constant.
#define MIN_INLIER_PROB 0.1
     26 
/* clang-format off */
// Motion model types, ordered from most to least restrictive. The numeric
// values are fixed (they index trans_model_params below); do not reorder.
enum {
  IDENTITY = 0,      // identity transformation, 0-parameter
  TRANSLATION = 1,   // translational motion 2-parameter
  ROTZOOM = 2,       // simplified affine with rotation + zoom only, 4-parameter
  AFFINE = 3,        // affine, 6-parameter
  TRANS_TYPES,       // number of valid model types; not itself a valid type
} UENUM1BYTE(TransformationType);
/* clang-format on */
     36 
// Number of parameters used by each transformation in TransformationType,
// indexed by the enum value (IDENTITY, TRANSLATION, ROTZOOM, AFFINE).
static const int trans_model_params[TRANS_TYPES] = { 0, 2, 4, 6 };
     39 
// Available methods which can be used for global motion estimation.
// NOTE(review): names suggest FEATURE_MATCH = sparse corner matching and
// DISFLOW = dense flow; confirm against the corresponding estimator sources.
typedef enum {
  GLOBAL_MOTION_METHOD_FEATURE_MATCH,
  GLOBAL_MOTION_METHOD_DISFLOW,
  GLOBAL_MOTION_METHOD_LAST = GLOBAL_MOTION_METHOD_DISFLOW,  // highest valid value
  GLOBAL_MOTION_METHODS  // number of methods; not itself a valid method
} GlobalMotionMethod;
     47 
// One estimated motion model together with the correspondences that were
// classified as inliers for it.
typedef struct {
  double params[MAX_PARAMDIM];  // model parameters; the number actually used
                                // is trans_model_params[type]
  int *inliers;      // inlier point data; storage layout and ownership are
                     // defined by the estimator — TODO(review): confirm
                     // (caller-allocated vs. estimator-allocated, who frees)
  int num_inliers;   // number of inlier correspondences stored in `inliers`
} MotionModel;
     53 
// Data structure to store a single correspondence point during global
// motion search.
//
// A correspondence (x, y) -> (rx, ry) means that point (x, y) in the
// source frame corresponds to point (rx, ry) in the ref frame.
typedef struct {
  double x, y;    // point in the source frame
  double rx, ry;  // matching point in the ref frame
} Correspondence;
     63 
// Which global motion method should we use in practice?
// Disflow is both faster and gives better results than feature matching in
// practically all cases, so we use disflow by default.
// (static const in a header: each translation unit gets its own copy — this
// follows the file's existing convention, see trans_model_params above.)
static const GlobalMotionMethod default_global_motion_method =
    GLOBAL_MOTION_METHOD_DISFLOW;
     69 
// Parameters of the identity model (defined in the corresponding .c file);
// useful as a default / fallback when estimation fails.
extern const double kIdentityParams[MAX_PARAMDIM];
     71 
// Compute a global motion model between the given source and ref frames.
//
// As is standard for video codecs, the resulting model maps from (x, y)
// coordinates in `src` to the corresponding points in `ref`, regardless
// of the temporal order of the two frames.
//
// type:              which TransformationType to fit (e.g. ROTZOOM, AFFINE)
// src, ref:          frames to estimate motion between
// bit_depth:         bit depth of the frame data
// gm_method:         which estimation method to use (see GlobalMotionMethod)
// downsample_level:  NOTE(review): presumably selects a pyramid level to run
//                    estimation at — confirm against the implementation
// motion_models:     output array of `num_motion_models` candidate models
// mem_alloc_failed:  set on return to indicate whether a failure was due to
//                    memory allocation (lets callers distinguish OOM from
//                    "no model found")
//
// Returns true if global motion estimation succeeded, false if not.
// The output models should only be used if this function succeeds.
bool aom_compute_global_motion(TransformationType type, YV12_BUFFER_CONFIG *src,
                               YV12_BUFFER_CONFIG *ref, int bit_depth,
                               GlobalMotionMethod gm_method,
                               int downsample_level, MotionModel *motion_models,
                               int num_motion_models, bool *mem_alloc_failed);
     85 
     86 #ifdef __cplusplus
     87 }
     88 #endif
     89 
     90 #endif  // AOM_AOM_DSP_FLOW_ESTIMATION_H_