XBR-filter

Oct 24th, 2014
/*
 * This file is part of FFmpeg.
 *
 * Copyright (c) 2014 Arwa Arif <arwaarif1994@gmail.com>
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * XBR Filter, used for the depixelization of images.
 * Based on Hyllian's 2xBR shader.
 * 2xBR Filter v0.2.5
 * Reference: http://board.byuu.org/viewtopic.php?f=10&t=2248
 */

#include "libavutil/opt.h"
#include "libavutil/avassert.h"
#include "libavutil/pixdesc.h"
#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"

typedef struct {
    /* Lookup table mapping a packed 24-bit RGB value (as built in init()) to a
     * packed Y'UV triple: Y in bits 16-23, U in bits 8-15, V in bits 0-7.
     * 1 << 24 entries of 4 bytes each, i.e. 64 MiB. */
    uint32_t rgbtoyuv[1 << 24];
} xBRContext;

/**
 * Calculates the weight of the difference between two pixels by converting
 * them to their Y'UV representation. It then applies the per-channel
 * thresholds used by the HQx filters (48 for Y, 7 for U, 6 for V), which is
 * what gives the result its smooth-looking edges.
 **/
static int d(AVFrame *in, int x1, int y1, int x2, int y2, const uint32_t *r2y)
{
#define YMASK 0xff0000
#define UMASK 0x00ff00
#define VMASK 0x0000ff

    int r1 = *(in->data[0] + y1 * in->linesize[0] + x1*3);
    int g1 = *(in->data[0] + y1 * in->linesize[0] + x1*3 + 1);
    int b1 = *(in->data[0] + y1 * in->linesize[0] + x1*3 + 2);

    int r2 = *(in->data[0] + y2 * in->linesize[0] + x2*3);
    int g2 = *(in->data[0] + y2 * in->linesize[0] + x2*3 + 1);
    int b2 = *(in->data[0] + y2 * in->linesize[0] + x2*3 + 2);

    uint32_t c1 = (r1 | g1 << 8 | b1 << 16);
    uint32_t c2 = (r2 | g2 << 8 | b2 << 16);

    uint32_t yuv1 = r2y[c1 & 0xffffff];
    uint32_t yuv2 = r2y[c2 & 0xffffff];

    return abs((yuv1 & YMASK) - (yuv2 & YMASK)) > (48 << 16) ||
           abs((yuv1 & UMASK) - (yuv2 & UMASK)) > ( 7 <<  8) ||
           abs((yuv1 & VMASK) - (yuv2 & VMASK)) > ( 6 <<  0);
}

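/* To illustrate the masked comparison above: each Y'UV component occupies its own
 * byte of the packed word, so a per-channel difference can be taken directly on
 * the masked values. A minimal standalone sketch of the same test on two made-up
 * packed values (pack_yuv and yuv_is_edge are hypothetical helpers for this
 * example only; they are not used by the filter):
 */
static uint32_t pack_yuv(uint32_t y, uint32_t u, uint32_t v)
{
    return (y << 16) | (u << 8) | v;    /* same layout as the rgbtoyuv table */
}

static int yuv_is_edge(uint32_t yuv1, uint32_t yuv2)
{
    return abs((yuv1 & YMASK) - (yuv2 & YMASK)) > (48 << 16) ||
           abs((yuv1 & UMASK) - (yuv2 & UMASK)) > ( 7 <<  8) ||
           abs((yuv1 & VMASK) - (yuv2 & VMASK)) > ( 6 <<  0);
}
/* e.g. yuv_is_edge(pack_yuv(120, 128, 128), pack_yuv(180, 128, 128)) is 1,
 * because Y differs by 60 > 48; with Y values 120 and 150 it would be 0. */
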
/**
 * Mixes a pixel A with pixel B, with B's transparency set to 'a'.
 * In other words, A is a solid color (bottom) and B is a transparent color (top).
 * With a = 1 the result is the 50/50 average of the two; with a = 0 it is simply
 * A's color.
 **/
static int mix(AVFrame *in, int x1, int y1, int x2, int y2, int a, int color)
{
    int col1, col2;
    col1 = *(in->data[0] + y1 * in->linesize[0] + x1*3 + color);
    col2 = *(in->data[0] + y2 * in->linesize[0] + x2*3 + color);

    return (a*col2 + (2-a)*col1) / 2;
}

/**
 * Writes one pixel of the scaled output image at (new_x, new_y): the input pixel
 * at (x, y), blended with its neighbour at (u, v) when mode is non-zero and
 * (u, v) lies inside the image.
 **/
static void fill(AVFrame *in, AVFrame *out, int u, int v, int x, int y, int mode, int new_x, int new_y)
{
    int r, g, b;

    /* mix colors if they are not on the boundary */
    if (mode != 0 && u >= 0 && v >= 0 && u < in->width && v < in->height) {

        r = mix(in, u, v, x, y, 1, 0);
        g = mix(in, u, v, x, y, 1, 1);
        b = mix(in, u, v, x, y, 1, 2);

    } else {

        r = *(in->data[0] + y*in->linesize[0] + x*3);
        g = *(in->data[0] + y*in->linesize[0] + x*3 + 1);
        b = *(in->data[0] + y*in->linesize[0] + x*3 + 2);
    }

    /* Insert blended color into scaledImageData */
    *(out->data[0] + (new_y)*out->linesize[0] + (new_x)*3)     = r;
    *(out->data[0] + (new_y)*out->linesize[0] + (new_x)*3 + 1) = g;
    *(out->data[0] + (new_y)*out->linesize[0] + (new_x)*3 + 2) = b;
}

/**
 * Applies the xBR filter rules.
 **/
static void apply_edge_detection_rules(AVFrame *in, AVFrame *out, int x, int y, const uint32_t *r2y)
{
    /* Matrix: (10 is 0,0 i.e. the current pixel)
           -2 | -1 |  0 | +1 | +2    (x)
       ______________________________
       -2 |    [A1][B1][C1]
       -1 |[A0][ A][ B][ C][C4]
        0 |[D0][ D][ E][ F][F4]
       +1 |[G0][ G][ H][ I][I4]
       +2 |    [G5][H5][I5]
          |(y)|
    */
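
    /* Each input pixel expands to a 2x2 block in the output: the four rules below
     * fill (2x, 2y), (2x+1, 2y), (2x, 2y+1) and (2x+1, 2y+1) respectively, blending
     * with a neighbour when the corresponding edge rule fires and copying the
     * current pixel otherwise. */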

    /* Cached Pixel Weight Difference */
    int d_E_D  = d(in, x,   y,   x-1, y,   r2y);
    int d_E_B  = d(in, x,   y,   x,   y-1, r2y);
    int d_E_F  = d(in, x,   y,   x+1, y,   r2y);
    int d_E_H  = d(in, x,   y,   x,   y+1, r2y);
    int d_E_G  = d(in, x,   y,   x-1, y+1, r2y);
    int d_E_C  = d(in, x,   y,   x+1, y-1, r2y);
    int d_A_D0 = d(in, x-1, y-1, x-2, y,   r2y);
    int d_A_B1 = d(in, x-1, y-1, x,   y-2, r2y);
    int d_D_B  = d(in, x-1, y,   x,   y-1, r2y);
    int d_D_H  = d(in, x-1, y,   x,   y+1, r2y);
    int d_D_A0 = d(in, x-1, y,   x-2, y-1, r2y);
    int d_B_F  = d(in, x,   y-1, x+1, y,   r2y);
    int d_B_A1 = d(in, x,   y-1, x-1, y-2, r2y);
    int d_E_A  = d(in, x,   y,   x-1, y-1, r2y);
    int d_E_I  = d(in, x,   y,   x+1, y+1, r2y);
    int d_C_F4 = d(in, x+1, y-1, x+2, y,   r2y);
    int d_C_B1 = d(in, x+1, y-1, x,   y-2, r2y);
    int d_F_H  = d(in, x+1, y,   x,   y+1, r2y);
    int d_F_C4 = d(in, x+1, y,   x+2, y-1, r2y);
    int d_B_C1 = d(in, x,   y-1, x+1, y-2, r2y);
    int d_G_D0 = d(in, x-1, y+1, x-2, y,   r2y);
    int d_G_H5 = d(in, x-1, y+1, x,   y+2, r2y);
    int d_H_G5 = d(in, x,   y+1, x-1, y+2, r2y);
    int d_D_G0 = d(in, x-1, y,   x-2, y+1, r2y);
    int d_I_F4 = d(in, x+1, y+1, x+2, y,   r2y);
    int d_I_H5 = d(in, x+1, y+1, x,   y+2, r2y);
    int d_H_I5 = d(in, x,   y+1, x+1, y+2, r2y);
    int d_H_I4 = d(in, x,   y+1, x+2, y+1, r2y);

    /**
     * Note on reading the edge detection rules:
     *
     * Each edge rule is an if..else statement; whenever the else branch is
     * taken, the color of the current pixel (position 0,0 in the matrix above)
     * is used to color its edge.
     *
     * Each if statement checks whether the sum of the weight differences on the
     * left is less than the sum of the weight differences on the right.
     */

    /**
     * Top Left Edge Detection Rule
     **/
    if ((d_E_G + d_E_C + d_A_D0 + d_A_B1 + (4*d_D_B)) < (d_D_H + d_D_A0 + d_B_F + d_B_A1 + (4*d_E_A))) {
        // Figure what color to blend with current pixel --> 10
        if (d_E_D <= d_E_B)
            fill(in, out, x-1, y, x, y, 1, x*2, y*2);
        else
            fill(in, out, x, y-1, x, y, 1, x*2, y*2);
    } else {
        /* Insert current pixel color into scaledImageData */
        fill(in, out, x, y, x, y, 0, x*2, y*2);
    }

    /**
     * Top Right Edge Detection Rule
     **/
    if ((d_E_I + d_E_A + d_C_F4 + d_C_B1 + (4*d_B_F)) < (d_F_H + d_F_C4 + d_D_B + d_B_C1 + (4*d_E_C))) {
        // Figure what color to blend with current pixel --> 10
        if (d_E_B <= d_E_F)
            fill(in, out, x, y-1, x, y, 1, (x*2)+1, y*2);
        else
            fill(in, out, x+1, y, x, y, 1, (x*2)+1, y*2);
    } else {
        /* Insert current pixel color into scaledImageData */
        fill(in, out, x, y, x, y, 0, (x*2)+1, y*2);
    }

    /**
     * Bottom Left Edge Detection Rule
     **/
    if ((d_E_A + d_E_I + d_G_D0 + d_G_H5 + (4*d_D_H)) < (d_D_B + d_D_G0 + d_F_H + d_H_G5 + (4*d_E_G))) {
        // Figure what color to blend with current pixel --> 10
        if (d_E_D <= d_E_H)
            fill(in, out, x-1, y, x, y, 1, x*2, (y*2)+1);
        else
            fill(in, out, x, y+1, x, y, 1, x*2, (y*2)+1);
    } else {
        /* Insert current pixel color into scaledImageData */
        fill(in, out, x, y, x, y, 0, x*2, (y*2)+1);
    }

    /**
     * Bottom Right Edge Detection Rule
     **/
    if ((d_E_C + d_E_G + d_I_F4 + d_I_H5 + (4*d_F_H)) < (d_D_H + d_H_I5 + d_H_I4 + d_B_F + (4*d_E_I))) {
        // Figure what color to blend with current pixel --> 10
        if (d_E_F <= d_E_H)
            fill(in, out, x+1, y, x, y, 1, (x*2)+1, (y*2)+1);
        else
            fill(in, out, x, y+1, x, y, 1, (x*2)+1, (y*2)+1);
    } else {
        /* Insert current pixel color into scaledImageData */
        fill(in, out, x, y, x, y, 0, (x*2)+1, (y*2)+1);
    }
}

static int config_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    AVFilterLink *inlink = ctx->inputs[0];

    outlink->w = inlink->w * 2;
    outlink->h = inlink->h * 2;
    return 0;
}

static int query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24, AV_PIX_FMT_NONE,
    };

    return ff_set_common_formats(ctx, ff_make_format_list(pix_fmts));
}

static int filter_frame(AVFilterLink *inlink, AVFrame *in)
{
    AVFilterContext *ctx = inlink->dst;
    AVFilterLink *outlink = ctx->outputs[0];
    int i, j;
    xBRContext *xBR = ctx->priv;
    const uint32_t *r2y = xBR->rgbtoyuv;
    AVFrame *out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!out) {
        av_frame_free(&in);
        return AVERROR(ENOMEM);
    }

    av_frame_copy_props(out, in);
    for (i = 0; i < inlink->w; i++)
        for (j = 0; j < inlink->h; j++)
            apply_edge_detection_rules(in, out, i, j, r2y);

    out->width  = outlink->w;
    out->height = outlink->h;

    av_frame_free(&in);
    return ff_filter_frame(outlink, out);
}

static int init(AVFilterContext *ctx)
{
    xBRContext *xbr = ctx->priv;
    uint32_t c;
    int bg, rg, g;

    /* Precompute the RGB -> packed Y'UV lookup table, iterating over the
     * differences rg = r - g and bg = b - g and sweeping g for each pair. */
    for (bg = -255; bg < 256; bg++) {
        for (rg = -255; rg < 256; rg++) {
            const uint32_t u = (uint32_t)((-169*rg + 500*bg)/1000) + 128;
            const uint32_t v = (uint32_t)(( 500*rg -  81*bg)/1000) + 128;
            int startg = FFMAX3(-bg, -rg, 0);
            int endg   = FFMIN3(255-bg, 255-rg, 255);
            uint32_t y = (uint32_t)((299*rg + 1000*startg + 114*bg)/1000);
            c = bg + (rg<<16) + 0x010101 * startg;
            for (g = startg; g <= endg; g++) {
                xbr->rgbtoyuv[c] = ((y++) << 16) + (u << 8) + v;
                c += 0x010101;
            }
        }
    }
    return 0;
}

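/* Expanding the arithmetic in init() above: for a color (r, g, b), the value stored
 * at index (r << 16 | g << 8 | b) is the same packed result that the direct
 * conversion sketched below would give. rgb_to_packed_yuv is an illustrative helper
 * for this comparison only; it is not used by the filter.
 */
static uint32_t rgb_to_packed_yuv(int r, int g, int b)
{
    uint32_t y = (uint32_t)(( 299*r + 587*g + 114*b) / 1000);        /* BT.601-style luma */
    uint32_t u = (uint32_t)((-169*r - 331*g + 500*b) / 1000) + 128;  /* blue difference   */
    uint32_t v = (uint32_t)(( 500*r - 419*g -  81*b) / 1000) + 128;  /* red difference    */
    return (y << 16) + (u << 8) + v;                                 /* same packing d() expects */
}
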
static const AVFilterPad xbr_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
    },
    { NULL }
};

static const AVFilterPad xbr_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_output,
    },
    { NULL }
};

AVFilter ff_vf_xbr = {
    .name          = "xbr",
    .description   = NULL_IF_CONFIG_SMALL("Scale the input by 2 using the xbr algorithm."),
    .priv_size     = sizeof(xBRContext),
    .inputs        = xbr_inputs,
    .outputs       = xbr_outputs,
    .query_formats = query_formats,
    .init          = init,
};
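
Usage note: assuming the filter is also registered in libavfilter/allfilters.c and added to the libavfilter Makefile (not part of this paste), it can be selected by name on the ffmpeg command line, for example "ffmpeg -i input.png -vf xbr output.png", which doubles both output dimensions as set in config_output.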