/*
 *      uvc_video.c  --  USB Video Class driver - Video handling
 *
 *      Copyright (C) 2005-2010
 *          Laurent Pinchart (laurent.pinchart@ideasonboard.com)
 *
 *      This program is free software; you can redistribute it and/or modify
 *      it under the terms of the GNU General Public License as published by
 *      the Free Software Foundation; either version 2 of the License, or
 *      (at your option) any later version.
 *
 */

#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/usb.h>
#include <linux/videodev2.h>
#include <linux/vmalloc.h>
#include <linux/wait.h>
#include <linux/atomic.h>
#include <asm/unaligned.h>

#include <media/v4l2-common.h>

#include "uvcvideo.h"

/* ------------------------------------------------------------------------
 * UVC Controls
 */

static int __uvc_query_ctrl(struct uvc_device *dev, __u8 query, __u8 unit,
            __u8 intfnum, __u8 cs, void *data, __u16 size,
            int timeout)
{
    __u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
    unsigned int pipe;

    pipe = (query & 0x80) ? usb_rcvctrlpipe(dev->udev, 0)
                  : usb_sndctrlpipe(dev->udev, 0);
    type |= (query & 0x80) ? USB_DIR_IN : USB_DIR_OUT;

    return usb_control_msg(dev->udev, pipe, query, type, cs << 8,
            unit << 8 | intfnum, data, size, timeout);
}

static const char *uvc_query_name(__u8 query)
{
    switch (query) {
    case UVC_SET_CUR:
        return "SET_CUR";
    case UVC_GET_CUR:
        return "GET_CUR";
    case UVC_GET_MIN:
        return "GET_MIN";
    case UVC_GET_MAX:
        return "GET_MAX";
    case UVC_GET_RES:
        return "GET_RES";
    case UVC_GET_LEN:
        return "GET_LEN";
    case UVC_GET_INFO:
        return "GET_INFO";
    case UVC_GET_DEF:
        return "GET_DEF";
    default:
        return "<invalid>";
    }
}

int uvc_query_ctrl(struct uvc_device *dev, __u8 query, __u8 unit,
            __u8 intfnum, __u8 cs, void *data, __u16 size)
{
    int ret;

    ret = __uvc_query_ctrl(dev, query, unit, intfnum, cs, data, size,
                UVC_CTRL_CONTROL_TIMEOUT);
    if (ret != size) {
        uvc_printk(KERN_ERR, "Failed to query (%s) UVC control %u on "
            "unit %u: %d (exp. %u).\n", uvc_query_name(query), cs,
            unit, ret, size);
        return -EIO;
    }

    return 0;
}
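
/* Illustrative example (not part of the driver): reading the current value of
 * a 2-byte control, e.g. a processing unit brightness control, would look like
 *
 *     __u8 buf[2];
 *     ret = uvc_query_ctrl(dev, UVC_GET_CUR, unit_id, intfnum,
 *                          UVC_PU_BRIGHTNESS_CONTROL, buf, 2);
 *
 * which __uvc_query_ctrl() turns into a class-specific control request with
 * wValue = UVC_PU_BRIGHTNESS_CONTROL << 8 and wIndex = unit_id << 8 | intfnum.
 * unit_id and buf are placeholder names used only for this example.
 */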

static void uvc_fixup_video_ctrl(struct uvc_streaming *stream,
    struct uvc_streaming_control *ctrl)
{
    struct uvc_format *format = NULL;
    struct uvc_frame *frame = NULL;
    unsigned int i;

    for (i = 0; i < stream->nformats; ++i) {
        if (stream->format[i].index == ctrl->bFormatIndex) {
            format = &stream->format[i];
            break;
        }
    }

    if (format == NULL)
        return;

    for (i = 0; i < format->nframes; ++i) {
        if (format->frame[i].bFrameIndex == ctrl->bFrameIndex) {
            frame = &format->frame[i];
            break;
        }
    }

    if (frame == NULL)
        return;

    if (!(format->flags & UVC_FMT_FLAG_COMPRESSED) ||
         (ctrl->dwMaxVideoFrameSize == 0 &&
          stream->dev->uvc_version < 0x0110))
        ctrl->dwMaxVideoFrameSize =
            frame->dwMaxVideoFrameBufferSize;

    /* The "TOSHIBA Web Camera - 5M" Chicony device (04f2:b50b) seems to
     * compute the bandwidth on 16 bits and erroneously sign-extend it to
     * 32 bits, resulting in a huge bandwidth value. Detect and fix that
     * condition by setting the 16 MSBs to 0 when they're all equal to 1.
     */
    if ((ctrl->dwMaxPayloadTransferSize & 0xffff0000) == 0xffff0000)
        ctrl->dwMaxPayloadTransferSize &= ~0xffff0000;

    if (!(format->flags & UVC_FMT_FLAG_COMPRESSED) &&
        stream->dev->quirks & UVC_QUIRK_FIX_BANDWIDTH &&
        stream->intf->num_altsetting > 1) {
        u32 interval;
        u32 bandwidth;

        interval = (ctrl->dwFrameInterval > 100000)
             ? ctrl->dwFrameInterval
             : frame->dwFrameInterval[0];

        /* Compute a bandwidth estimation by multiplying the frame
         * size by the number of video frames per second, divide the
         * result by the number of USB frames (or micro-frames for
         * high-speed devices) per second and add the UVC header size
         * (assumed to be 12 bytes long).
         */
        bandwidth = frame->wWidth * frame->wHeight / 8 * format->bpp;
        bandwidth *= 10000000 / interval + 1;
        bandwidth /= 1000;
        if (stream->dev->udev->speed == USB_SPEED_HIGH)
            bandwidth /= 8;
        bandwidth += 12;

        /* The bandwidth estimate is too low for many cameras. Don't use
         * maximum packet sizes lower than 1024 bytes to try and work
         * around the problem. According to measurements done on two
         * different camera models, the value is high enough to get most
         * resolutions working while not preventing two simultaneous
         * VGA streams at 15 fps.
         */
        bandwidth = max_t(u32, bandwidth, 1024);

        ctrl->dwMaxPayloadTransferSize = bandwidth;
    }
    if (format->flags & UVC_FMT_FLAG_COMPRESSED) {
        ctrl->dwMaxPayloadTransferSize = 0x400;
    }
}
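
/* Worked example of the bandwidth estimate above (illustrative numbers, not
 * taken from a real device): for an uncompressed 640x480 format with
 * format->bpp = 16 and a 30 fps frame interval (dwFrameInterval = 333333,
 * i.e. 333333 * 100ns) on a high-speed device:
 *
 *     bandwidth  = 640 * 480 / 8 * 16       = 614400 bytes per frame
 *     bandwidth *= 10000000 / 333333 + 1    -> 614400 * 31 = 19046400
 *     bandwidth /= 1000                     -> 19046
 *     bandwidth /= 8  (high speed)          -> 2380
 *     bandwidth += 12 (UVC header)          -> 2392
 *
 * max_t(u32, 2392, 1024) then leaves dwMaxPayloadTransferSize at roughly
 * 2392 bytes per microframe.
 */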

static int uvc_get_video_ctrl(struct uvc_streaming *stream,
    struct uvc_streaming_control *ctrl, int probe, __u8 query)
{
    __u8 *data;
    __u16 size;
    int ret;

    size = stream->dev->uvc_version >= 0x0110 ? 34 : 26;
    if (stream->dev->uvc_version >= 0x0150)
        size = 48;
    if ((stream->dev->quirks & UVC_QUIRK_PROBE_DEF) &&
            query == UVC_GET_DEF)
        return -EIO;

    data = kmalloc(size, GFP_KERNEL);
    if (data == NULL)
        return -ENOMEM;

    ret = __uvc_query_ctrl(stream->dev, query, 0, stream->intfnum,
        probe ? UVC_VS_PROBE_CONTROL : UVC_VS_COMMIT_CONTROL, data,
        size, uvc_timeout_param);

    if ((query == UVC_GET_MIN || query == UVC_GET_MAX) && ret == 2) {
        /* Some cameras, mostly based on Bison Electronics chipsets,
         * answer a GET_MIN or GET_MAX request with the wCompQuality
         * field only.
         */
        uvc_warn_once(stream->dev, UVC_WARN_MINMAX, "UVC non "
            "compliance - GET_MIN/MAX(PROBE) incorrectly "
            "supported. Enabling workaround.\n");
        memset(ctrl, 0, sizeof *ctrl);
        ctrl->wCompQuality = le16_to_cpup((__le16 *)data);
        ret = 0;
        goto out;
    } else if (query == UVC_GET_DEF && probe == 1 && ret != size) {
        /* Many cameras don't support the GET_DEF request on their
         * video probe control. Warn once and return, the caller will
         * fall back to GET_CUR.
         */
        uvc_warn_once(stream->dev, UVC_WARN_PROBE_DEF, "UVC non "
            "compliance - GET_DEF(PROBE) not supported. "
            "Enabling workaround.\n");
        ret = -EIO;
        goto out;
    } else if (ret != size) {
        uvc_printk(KERN_ERR, "Failed to query (%u) UVC %s control : "
            "%d (exp. %u).\n", query, probe ? "probe" : "commit",
            ret, size);
        ret = -EIO;
        goto out;
    }

    ctrl->bmHint = le16_to_cpup((__le16 *)&data[0]);
    ctrl->bFormatIndex = data[2];
    ctrl->bFrameIndex = data[3];
    ctrl->dwFrameInterval = le32_to_cpup((__le32 *)&data[4]);
    ctrl->wKeyFrameRate = le16_to_cpup((__le16 *)&data[8]);
    ctrl->wPFrameRate = le16_to_cpup((__le16 *)&data[10]);
    ctrl->wCompQuality = le16_to_cpup((__le16 *)&data[12]);
    ctrl->wCompWindowSize = le16_to_cpup((__le16 *)&data[14]);
    ctrl->wDelay = le16_to_cpup((__le16 *)&data[16]);
    ctrl->dwMaxVideoFrameSize = get_unaligned_le32(&data[18]);
    ctrl->dwMaxPayloadTransferSize = get_unaligned_le32(&data[22]);

    if (size == 34) {
        ctrl->dwClockFrequency = get_unaligned_le32(&data[26]);
        ctrl->bmFramingInfo = data[30];
        ctrl->bPreferedVersion = data[31];
        ctrl->bMinVersion = data[32];
        ctrl->bMaxVersion = data[33];
    } else {
        ctrl->dwClockFrequency = stream->dev->clock_frequency;
        ctrl->bmFramingInfo = 0;
        ctrl->bPreferedVersion = 0;
        ctrl->bMinVersion = 0;
        ctrl->bMaxVersion = 0;
    }

    /* Some broken devices return null or wrong dwMaxVideoFrameSize and
     * dwMaxPayloadTransferSize fields. Try to get the value from the
     * format and frame descriptors.
     */
    uvc_fixup_video_ctrl(stream, ctrl);
    ret = 0;

out:
    kfree(data);
    return ret;
}

static int uvc_set_video_ctrl(struct uvc_streaming *stream,
    struct uvc_streaming_control *ctrl, int probe)
{
    __u8 *data;
    __u16 size;
    int ret;

    size = stream->dev->uvc_version >= 0x0110 ? 34 : 26;
    if (stream->dev->uvc_version >= 0x0150)
        size = 48;
    data = kzalloc(size, GFP_KERNEL);
    if (data == NULL)
        return -ENOMEM;

    *(__le16 *)&data[0] = cpu_to_le16(ctrl->bmHint);
    data[2] = ctrl->bFormatIndex;
    data[3] = ctrl->bFrameIndex;
    *(__le32 *)&data[4] = cpu_to_le32(ctrl->dwFrameInterval);
    *(__le16 *)&data[8] = cpu_to_le16(ctrl->wKeyFrameRate);
    *(__le16 *)&data[10] = cpu_to_le16(ctrl->wPFrameRate);
    *(__le16 *)&data[12] = cpu_to_le16(ctrl->wCompQuality);
    *(__le16 *)&data[14] = cpu_to_le16(ctrl->wCompWindowSize);
    *(__le16 *)&data[16] = cpu_to_le16(ctrl->wDelay);
    put_unaligned_le32(ctrl->dwMaxVideoFrameSize, &data[18]);
    put_unaligned_le32(ctrl->dwMaxPayloadTransferSize, &data[22]);

    if (size == 34) {
        put_unaligned_le32(ctrl->dwClockFrequency, &data[26]);
        data[30] = ctrl->bmFramingInfo;
        data[31] = ctrl->bPreferedVersion;
        data[32] = ctrl->bMinVersion;
        data[33] = ctrl->bMaxVersion;
    }

    ret = __uvc_query_ctrl(stream->dev, UVC_SET_CUR, 0, stream->intfnum,
        probe ? UVC_VS_PROBE_CONTROL : UVC_VS_COMMIT_CONTROL, data,
        size, uvc_timeout_param);
    if (ret != size) {
        uvc_printk(KERN_ERR, "Failed to set UVC %s control : "
            "%d (exp. %u).\n", probe ? "probe" : "commit",
            ret, size);
        ret = -EIO;
    }

    kfree(data);
    return ret;
}

int uvc_probe_video(struct uvc_streaming *stream,
    struct uvc_streaming_control *probe)
{
    struct uvc_streaming_control probe_min, probe_max;
    __u16 bandwidth;
    unsigned int i;
    int ret;

    /* Perform probing. The device should adjust the requested values
     * according to its capabilities. However, some devices, namely the
     * first generation UVC Logitech webcams, don't implement the Video
     * Probe control properly, and just return the needed bandwidth. For
     * that reason, if the needed bandwidth exceeds the maximum available
     * bandwidth, try to lower the quality.
     */
    ret = uvc_set_video_ctrl(stream, probe, 1);
    if (ret < 0)
        goto done;

    /* Get the minimum and maximum values for compression settings. */
    if (!(stream->dev->quirks & UVC_QUIRK_PROBE_MINMAX)) {
        ret = uvc_get_video_ctrl(stream, &probe_min, 1, UVC_GET_MIN);
        if (ret < 0)
            goto done;
        ret = uvc_get_video_ctrl(stream, &probe_max, 1, UVC_GET_MAX);
        if (ret < 0)
            goto done;

        probe->wCompQuality = probe_max.wCompQuality;
    }

    for (i = 0; i < 2; ++i) {
        ret = uvc_set_video_ctrl(stream, probe, 1);
        if (ret < 0)
            goto done;
        ret = uvc_get_video_ctrl(stream, probe, 1, UVC_GET_CUR);
        if (ret < 0)
            goto done;

        if (stream->intf->num_altsetting == 1)
            break;

        bandwidth = probe->dwMaxPayloadTransferSize;
        if (bandwidth <= stream->maxpsize)
            break;

        if (stream->dev->quirks & UVC_QUIRK_PROBE_MINMAX) {
            ret = -ENOSPC;
            goto done;
        }

        /* TODO: negotiate compression parameters */
        probe->wKeyFrameRate = probe_min.wKeyFrameRate;
        probe->wPFrameRate = probe_min.wPFrameRate;
        probe->wCompQuality = probe_max.wCompQuality;
        probe->wCompWindowSize = probe_min.wCompWindowSize;
    }

done:
    return ret;
}

static int uvc_commit_video(struct uvc_streaming *stream,
                struct uvc_streaming_control *probe)
{
    return uvc_set_video_ctrl(stream, probe, 0);
}

/* -----------------------------------------------------------------------------
 * Clocks and timestamps
 */

static inline void uvc_video_get_ts(struct timespec *ts)
{
    if (uvc_clock_param == CLOCK_MONOTONIC)
        ktime_get_ts(ts);
    else
        ktime_get_real_ts(ts);
}

static void
uvc_video_clock_decode(struct uvc_streaming *stream, struct uvc_buffer *buf,
               const __u8 *data, int len)
{
    struct uvc_clock_sample *sample;
    unsigned int header_size;
    bool has_pts = false;
    bool has_scr = false;
    unsigned long flags;
    struct timespec ts;
    u16 host_sof;
    u16 dev_sof;

    switch (data[1] & (UVC_STREAM_PTS | UVC_STREAM_SCR)) {
    case UVC_STREAM_PTS | UVC_STREAM_SCR:
        header_size = 12;
        has_pts = true;
        has_scr = true;
        break;
    case UVC_STREAM_PTS:
        header_size = 6;
        has_pts = true;
        break;
    case UVC_STREAM_SCR:
        header_size = 8;
        has_scr = true;
        break;
    default:
        header_size = 2;
        break;
    }

    /* Check for invalid headers. */
    if (len < header_size)
        return;

    /* Extract the timestamps:
     *
     * - store the frame PTS in the buffer structure
     * - if the SCR field is present, retrieve the host SOF counter and
     *   kernel timestamps and store them with the SCR STC and SOF fields
     *   in the ring buffer
     */
    if (has_pts && buf != NULL)
        buf->pts = get_unaligned_le32(&data[2]);

    if (!has_scr)
        return;

    /* To limit the amount of data, drop SCRs with an SOF identical to the
     * previous one.
     */
    dev_sof = get_unaligned_le16(&data[header_size - 2]);
    if (dev_sof == stream->clock.last_sof)
        return;

    stream->clock.last_sof = dev_sof;

    host_sof = usb_get_current_frame_number(stream->dev->udev);
    uvc_video_get_ts(&ts);

    /* The UVC specification allows device implementations that can't obtain
     * the USB frame number to keep their own frame counters as long as they
     * match the size and frequency of the frame number associated with USB
     * SOF tokens. The SOF values sent by such devices differ from the USB
     * SOF tokens by a fixed offset that needs to be estimated and accounted
     * for to make timestamp recovery as accurate as possible.
     *
     * The offset is estimated the first time a device SOF value is received
     * as the difference between the host and device SOF values. As the two
     * SOF values can differ slightly due to transmission delays, consider
     * that the offset is null if the difference is not higher than 10 ms
     * (negative differences can not happen and are thus considered as an
     * offset). The video commit control wDelay field should be used to
     * compute a dynamic threshold instead of using a fixed 10 ms value, but
     * devices don't report reliable wDelay values.
     *
     * See uvc_video_clock_host_sof() for an explanation regarding why only
     * the 8 LSBs of the delta are kept.
     */
    if (stream->clock.sof_offset == (u16)-1) {
        u16 delta_sof = (host_sof - dev_sof) & 255;
        if (delta_sof >= 10)
            stream->clock.sof_offset = delta_sof;
        else
            stream->clock.sof_offset = 0;
    }

    dev_sof = (dev_sof + stream->clock.sof_offset) & 2047;

    spin_lock_irqsave(&stream->clock.lock, flags);

    sample = &stream->clock.samples[stream->clock.head];
    sample->dev_stc = get_unaligned_le32(&data[header_size - 6]);
    sample->dev_sof = dev_sof;
    sample->host_sof = host_sof;
    sample->host_ts = ts;

    /* Update the sliding window head and count. */
    stream->clock.head = (stream->clock.head + 1) % stream->clock.size;

    if (stream->clock.count < stream->clock.size)
        stream->clock.count++;

    spin_unlock_irqrestore(&stream->clock.lock, flags);
}
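
/* Illustrative example of the SOF offset estimation above (made-up values):
 * if a device keeps its own frame counter, the first SCR could report
 * dev_sof = 400 while the host reports host_sof = 500. The 8 LSB delta is
 * (500 - 400) & 255 = 100 >= 10, so sof_offset is set to 100 and later
 * device SOF values are shifted by 100 (modulo 2048). If the device uses
 * the real USB SOF instead, e.g. dev_sof = 500 and host_sof = 502, the
 * delta is 2 < 10 and the offset is considered null.
 */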

static void uvc_video_clock_reset(struct uvc_streaming *stream)
{
    struct uvc_clock *clock = &stream->clock;

    clock->head = 0;
    clock->count = 0;
    clock->last_sof = -1;
    clock->sof_offset = -1;
}

static int uvc_video_clock_init(struct uvc_streaming *stream)
{
    struct uvc_clock *clock = &stream->clock;

    spin_lock_init(&clock->lock);
    clock->size = 32;

    clock->samples = kmalloc(clock->size * sizeof(*clock->samples),
                 GFP_KERNEL);
    if (clock->samples == NULL)
        return -ENOMEM;

    uvc_video_clock_reset(stream);

    return 0;
}

static void uvc_video_clock_cleanup(struct uvc_streaming *stream)
{
    kfree(stream->clock.samples);
    stream->clock.samples = NULL;
}

/*
 * uvc_video_clock_host_sof - Return the host SOF value for a clock sample
 *
 * Host SOF counters reported by usb_get_current_frame_number() usually don't
 * cover the whole 11-bit SOF range (0-2047) but are limited to the HCI frame
 * schedule window. They can be limited to 8, 9 or 10 bits depending on the host
 * controller and its configuration.
 *
 * We thus need to recover the SOF value corresponding to the host frame number.
 * As the device and host frame numbers are sampled in a short interval, the
 * difference between their values should be equal to a small delta plus an
 * integer multiple of 256 caused by the host frame number limited precision.
 *
 * To obtain the recovered host SOF value, compute the small delta by masking
 * the high bits of the host frame counter and device SOF difference and add it
 * to the device SOF value.
 */
static u16 uvc_video_clock_host_sof(const struct uvc_clock_sample *sample)
{
    /* The delta value can be negative. */
    s8 delta_sof;

    delta_sof = (sample->host_sof - sample->dev_sof) & 255;

    return (sample->dev_sof + delta_sof) & 2047;
}
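
/* Worked example (illustrative values): assume the host controller only
 * exposes an 8-bit frame counter. With sample->dev_sof = 1800 and a real
 * host SOF of 1803, usb_get_current_frame_number() would have returned
 * 1803 & 255 = 11. delta_sof = (11 - 1800) & 255 = 3, and the recovered
 * host SOF is (1800 + 3) & 2047 = 1803. A host sample taken slightly
 * earlier than the device one gives a small negative delta instead,
 * e.g. delta_sof = -2 and a recovered value of 1798.
 */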

/*
 * uvc_video_clock_update - Update the buffer timestamp
 *
 * This function converts the buffer PTS timestamp to the host clock domain by
 * going through the USB SOF clock domain and stores the result in the V4L2
 * buffer timestamp field.
 *
 * The relationship between the device clock and the host clock isn't known.
 * However, the device and the host share the common USB SOF clock which can be
 * used to recover that relationship.
 *
 * The relationship between the device clock and the USB SOF clock is considered
 * to be linear over the clock samples sliding window and is given by
 *
 * SOF = m * PTS + p
 *
 * Several methods to compute the slope (m) and intercept (p) can be used. As
 * the clock drift should be small compared to the sliding window size, we
 * assume that the line that goes through the points at both ends of the window
 * is a good approximation. Naming those points P1 and P2, we get
 *
 * SOF = (SOF2 - SOF1) / (STC2 - STC1) * PTS
 *     + (SOF1 * STC2 - SOF2 * STC1) / (STC2 - STC1)
 *
 * or
 *
 * SOF = ((SOF2 - SOF1) * PTS + SOF1 * STC2 - SOF2 * STC1) / (STC2 - STC1)   (1)
 *
 * to avoid losing precision in the division. Similarly, the host timestamp is
 * computed with
 *
 * TS = ((TS2 - TS1) * PTS + TS1 * SOF2 - TS2 * SOF1) / (SOF2 - SOF1)        (2)
 *
 * SOF values are coded on 11 bits by USB. We extend their precision with 16
 * decimal bits, leading to an 11.16 coding.
 *
 * TODO: To avoid surprises with device clock values, PTS/STC timestamps should
 * be normalized using the nominal device clock frequency reported through the
 * UVC descriptors.
 *
 * Both the PTS/STC and SOF counters roll over, after a fixed but device
 * specific amount of time for PTS/STC and after 2048ms for SOF. As long as the
 * sliding window size is smaller than the rollover period, differences computed
 * on unsigned integers will produce the correct result. However, the p term in
 * the linear relations will be miscomputed.
 *
 * To fix the issue, we subtract a constant from the PTS and STC values to bring
 * PTS to half the 32 bit STC range. The sliding window STC values then fit into
 * the 32 bit range without any rollover.
 *
 * Similarly, we add 2048 to the device SOF values to make sure that the SOF
 * computed by (1) will never be smaller than 0. This offset is then compensated
 * by adding 2048 to the SOF values used in (2). However, this doesn't prevent
 * rollovers between (1) and (2): the SOF value computed by (1) can be slightly
 * lower than 4096, and the host SOF counters can have rolled over to 2048. This
 * case is handled by subtracting 2048 from the SOF value if it exceeds the host
 * SOF value at the end of the sliding window.
 *
 * Finally we subtract a constant from the host timestamps to bring the first
 * timestamp of the sliding window to 1s.
 */
void uvc_video_clock_update(struct uvc_streaming *stream,
                struct vb2_v4l2_buffer *vbuf,
                struct uvc_buffer *buf)
{
    struct uvc_clock *clock = &stream->clock;
    struct uvc_clock_sample *first;
    struct uvc_clock_sample *last;
    unsigned long flags;
    struct timespec ts;
    u32 delta_stc;
    u32 y1, y2;
    u32 x1, x2;
    u32 mean;
    u32 sof;
    u32 div;
    u32 rem;
    u64 y;

    if (!uvc_hw_timestamps_param)
        return;

    spin_lock_irqsave(&clock->lock, flags);

    if (clock->count < clock->size)
        goto done;

    first = &clock->samples[clock->head];
    last = &clock->samples[(clock->head - 1) % clock->size];

    /* First step, PTS to SOF conversion. */
    delta_stc = buf->pts - (1UL << 31);
    x1 = first->dev_stc - delta_stc;
    x2 = last->dev_stc - delta_stc;
    if (x1 == x2)
        goto done;

    y1 = (first->dev_sof + 2048) << 16;
    y2 = (last->dev_sof + 2048) << 16;
    if (y2 < y1)
        y2 += 2048 << 16;

    y = (u64)(y2 - y1) * (1ULL << 31) + (u64)y1 * (u64)x2
      - (u64)y2 * (u64)x1;
    y = div_u64(y, x2 - x1);

    sof = y;

    uvc_trace(UVC_TRACE_CLOCK, "%s: PTS %u y %llu.%06llu SOF %u.%06llu "
          "(x1 %u x2 %u y1 %u y2 %u SOF offset %u)\n",
          stream->dev->name, buf->pts,
          y >> 16, div_u64((y & 0xffff) * 1000000, 65536),
          sof >> 16, div_u64(((u64)sof & 0xffff) * 1000000LLU, 65536),
          x1, x2, y1, y2, clock->sof_offset);

    /* Second step, SOF to host clock conversion. */
    x1 = (uvc_video_clock_host_sof(first) + 2048) << 16;
    x2 = (uvc_video_clock_host_sof(last) + 2048) << 16;
    if (x2 < x1)
        x2 += 2048 << 16;
    if (x1 == x2)
        goto done;

    ts = timespec_sub(last->host_ts, first->host_ts);
    y1 = NSEC_PER_SEC;
    y2 = (ts.tv_sec + 1) * NSEC_PER_SEC + ts.tv_nsec;

    /* Interpolated and host SOF timestamps can wrap around at slightly
     * different times. Handle this by adding or removing 2048 to or from
     * the computed SOF value to keep it close to the SOF samples mean
     * value.
     */
    mean = (x1 + x2) / 2;
    if (mean - (1024 << 16) > sof)
        sof += 2048 << 16;
    else if (sof > mean + (1024 << 16))
        sof -= 2048 << 16;

    y = (u64)(y2 - y1) * (u64)sof + (u64)y1 * (u64)x2
      - (u64)y2 * (u64)x1;
    y = div_u64(y, x2 - x1);

    div = div_u64_rem(y, NSEC_PER_SEC, &rem);
    ts.tv_sec = first->host_ts.tv_sec - 1 + div;
    ts.tv_nsec = first->host_ts.tv_nsec + rem;
    if (ts.tv_nsec >= NSEC_PER_SEC) {
        ts.tv_sec++;
        ts.tv_nsec -= NSEC_PER_SEC;
    }

    uvc_trace(UVC_TRACE_CLOCK, "%s: SOF %u.%06llu y %llu ts %lu.%06lu "
          "buf ts %lu.%06lu (x1 %u/%u/%u x2 %u/%u/%u y1 %u y2 %u)\n",
          stream->dev->name,
          sof >> 16, div_u64(((u64)sof & 0xffff) * 1000000LLU, 65536),
          y, ts.tv_sec, ts.tv_nsec / NSEC_PER_USEC,
          vbuf->timestamp.tv_sec,
          (unsigned long)vbuf->timestamp.tv_usec,
          x1, first->host_sof, first->dev_sof,
          x2, last->host_sof, last->dev_sof, y1, y2);

    /* Update the V4L2 buffer. */
    vbuf->timestamp.tv_sec = ts.tv_sec;
    vbuf->timestamp.tv_usec = ts.tv_nsec / NSEC_PER_USEC;

done:
    spin_unlock_irqrestore(&stream->clock.lock, flags);
}
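
/* Quick sanity check of the interpolation formulas (1) and (2) above with
 * small made-up numbers: for two points P1 = (x1,y1) = (10,100) and
 * P2 = (x2,y2) = (20,300), evaluating
 *
 *     y = ((y2 - y1) * x + y1 * x2 - y2 * x1) / (x2 - x1)
 *
 * at x = 15 gives (200 * 15 + 100 * 20 - 300 * 10) / 10 = 200, the midpoint
 * of the line, as expected. The code above applies the same form first to
 * (STC, SOF) pairs in 11.16 fixed point and then to (SOF, timestamp) pairs.
 */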

/* ------------------------------------------------------------------------
 * Stream statistics
 */

static void uvc_video_stats_decode(struct uvc_streaming *stream,
        const __u8 *data, int len)
{
    unsigned int header_size;
    bool has_pts = false;
    bool has_scr = false;
    u16 uninitialized_var(scr_sof);
    u32 uninitialized_var(scr_stc);
    u32 uninitialized_var(pts);

    if (stream->stats.stream.nb_frames == 0 &&
        stream->stats.frame.nb_packets == 0)
        ktime_get_ts(&stream->stats.stream.start_ts);

    switch (data[1] & (UVC_STREAM_PTS | UVC_STREAM_SCR)) {
    case UVC_STREAM_PTS | UVC_STREAM_SCR:
        header_size = 12;
        has_pts = true;
        has_scr = true;
        break;
    case UVC_STREAM_PTS:
        header_size = 6;
        has_pts = true;
        break;
    case UVC_STREAM_SCR:
        header_size = 8;
        has_scr = true;
        break;
    default:
        header_size = 2;
        break;
    }

    /* Check for invalid headers. */
    if (len < header_size || data[0] < header_size) {
        stream->stats.frame.nb_invalid++;
        return;
    }

    /* Extract the timestamps. */
    if (has_pts)
        pts = get_unaligned_le32(&data[2]);

    if (has_scr) {
        scr_stc = get_unaligned_le32(&data[header_size - 6]);
        scr_sof = get_unaligned_le16(&data[header_size - 2]);
    }

    /* Is PTS constant through the whole frame ? */
    if (has_pts && stream->stats.frame.nb_pts) {
        if (stream->stats.frame.pts != pts) {
            stream->stats.frame.nb_pts_diffs++;
            stream->stats.frame.last_pts_diff =
                stream->stats.frame.nb_packets;
        }
    }

    if (has_pts) {
        stream->stats.frame.nb_pts++;
        stream->stats.frame.pts = pts;
    }

    /* Do all frames have a PTS in their first non-empty packet, or before
     * their first empty packet ?
     */
    if (stream->stats.frame.size == 0) {
        if (len > header_size)
            stream->stats.frame.has_initial_pts = has_pts;
        if (len == header_size && has_pts)
            stream->stats.frame.has_early_pts = true;
    }

    /* Do the SCR.STC and SCR.SOF fields vary through the frame ? */
    if (has_scr && stream->stats.frame.nb_scr) {
        if (stream->stats.frame.scr_stc != scr_stc)
            stream->stats.frame.nb_scr_diffs++;
    }

    if (has_scr) {
        /* Expand the SOF counter to 32 bits and store its value. */
        if (stream->stats.stream.nb_frames > 0 ||
            stream->stats.frame.nb_scr > 0)
            stream->stats.stream.scr_sof_count +=
                (scr_sof - stream->stats.stream.scr_sof) % 2048;
        stream->stats.stream.scr_sof = scr_sof;

        stream->stats.frame.nb_scr++;
        stream->stats.frame.scr_stc = scr_stc;
        stream->stats.frame.scr_sof = scr_sof;

        if (scr_sof < stream->stats.stream.min_sof)
            stream->stats.stream.min_sof = scr_sof;
        if (scr_sof > stream->stats.stream.max_sof)
            stream->stats.stream.max_sof = scr_sof;
    }

    /* Record the first non-empty packet number. */
    if (stream->stats.frame.size == 0 && len > header_size)
        stream->stats.frame.first_data = stream->stats.frame.nb_packets;

    /* Update the frame size. */
    stream->stats.frame.size += len - header_size;

    /* Update the packets counters. */
    stream->stats.frame.nb_packets++;
    if (len <= header_size)
        stream->stats.frame.nb_empty++;

    if (data[1] & UVC_STREAM_ERR)
        stream->stats.frame.nb_errors++;
}

static void uvc_video_stats_update(struct uvc_streaming *stream)
{
    struct uvc_stats_frame *frame = &stream->stats.frame;

    uvc_trace(UVC_TRACE_STATS, "frame %u stats: %u/%u/%u packets, "
          "%u/%u/%u pts (%searly %sinitial), %u/%u scr, "
          "last pts/stc/sof %u/%u/%u\n",
          stream->sequence, frame->first_data,
          frame->nb_packets - frame->nb_empty, frame->nb_packets,
          frame->nb_pts_diffs, frame->last_pts_diff, frame->nb_pts,
          frame->has_early_pts ? "" : "!",
          frame->has_initial_pts ? "" : "!",
          frame->nb_scr_diffs, frame->nb_scr,
          frame->pts, frame->scr_stc, frame->scr_sof);

    stream->stats.stream.nb_frames++;
    stream->stats.stream.nb_packets += stream->stats.frame.nb_packets;
    stream->stats.stream.nb_empty += stream->stats.frame.nb_empty;
    stream->stats.stream.nb_errors += stream->stats.frame.nb_errors;
    stream->stats.stream.nb_invalid += stream->stats.frame.nb_invalid;

    if (frame->has_early_pts)
        stream->stats.stream.nb_pts_early++;
    if (frame->has_initial_pts)
        stream->stats.stream.nb_pts_initial++;
    if (frame->last_pts_diff <= frame->first_data)
        stream->stats.stream.nb_pts_constant++;
    if (frame->nb_scr >= frame->nb_packets - frame->nb_empty)
        stream->stats.stream.nb_scr_count_ok++;
    if (frame->nb_scr_diffs + 1 == frame->nb_scr)
        stream->stats.stream.nb_scr_diffs_ok++;

    memset(&stream->stats.frame, 0, sizeof(stream->stats.frame));
}

size_t uvc_video_stats_dump(struct uvc_streaming *stream, char *buf,
                size_t size)
{
    unsigned int scr_sof_freq;
    unsigned int duration;
    struct timespec ts;
    size_t count = 0;

    ts.tv_sec = stream->stats.stream.stop_ts.tv_sec
          - stream->stats.stream.start_ts.tv_sec;
    ts.tv_nsec = stream->stats.stream.stop_ts.tv_nsec
           - stream->stats.stream.start_ts.tv_nsec;
    if (ts.tv_nsec < 0) {
        ts.tv_sec--;
        ts.tv_nsec += 1000000000;
    }

    /* Compute the SCR.SOF frequency estimate. At the nominal 1kHz SOF
     * frequency this will not overflow before more than 1h.
     */
    duration = ts.tv_sec * 1000 + ts.tv_nsec / 1000000;
    if (duration != 0)
        scr_sof_freq = stream->stats.stream.scr_sof_count * 1000
                 / duration;
    else
        scr_sof_freq = 0;

    count += scnprintf(buf + count, size - count,
               "frames:  %u\npackets: %u\nempty:   %u\n"
               "errors:  %u\ninvalid: %u\n",
               stream->stats.stream.nb_frames,
               stream->stats.stream.nb_packets,
               stream->stats.stream.nb_empty,
               stream->stats.stream.nb_errors,
               stream->stats.stream.nb_invalid);
    count += scnprintf(buf + count, size - count,
               "pts: %u early, %u initial, %u ok\n",
               stream->stats.stream.nb_pts_early,
               stream->stats.stream.nb_pts_initial,
               stream->stats.stream.nb_pts_constant);
    count += scnprintf(buf + count, size - count,
               "scr: %u count ok, %u diff ok\n",
               stream->stats.stream.nb_scr_count_ok,
               stream->stats.stream.nb_scr_diffs_ok);
    count += scnprintf(buf + count, size - count,
               "sof: %u <= sof <= %u, freq %u.%03u kHz\n",
               stream->stats.stream.min_sof,
               stream->stats.stream.max_sof,
               scr_sof_freq / 1000, scr_sof_freq % 1000);

    return count;
}

static void uvc_video_stats_start(struct uvc_streaming *stream)
{
    memset(&stream->stats, 0, sizeof(stream->stats));
    stream->stats.stream.min_sof = 2048;
}

static void uvc_video_stats_stop(struct uvc_streaming *stream)
{
    ktime_get_ts(&stream->stats.stream.stop_ts);
}

/* ------------------------------------------------------------------------
 * Video codecs
 */

/* Video payload decoding is handled by uvc_video_decode_start(),
 * uvc_video_decode_data() and uvc_video_decode_end().
 *
 * uvc_video_decode_start is called with URB data at the start of a bulk or
 * isochronous payload. It processes header data and returns the header size
 * in bytes if successful. If an error occurs, it returns a negative error
 * code. The following error codes have special meanings.
 *
 * - EAGAIN informs the caller that the current video buffer should be marked
 *   as done, and that the function should be called again with the same data
 *   and a new video buffer. This is used when end of frame conditions can be
 *   reliably detected at the beginning of the next frame only.
 *
 * If an error other than -EAGAIN is returned, the caller will drop the current
 * payload. No call to uvc_video_decode_data and uvc_video_decode_end will be
 * made until the next payload. -ENODATA can be used to drop the current
 * payload if no other error code is appropriate.
 *
 * uvc_video_decode_data is called for every URB with URB data. It copies the
 * data to the video buffer.
 *
 * uvc_video_decode_end is called with header data at the end of a bulk or
 * isochronous payload. It performs any additional header data processing and
 * returns 0 or a negative error code if an error occurred. As header data have
 * already been processed by uvc_video_decode_start, this function isn't
 * required to perform sanity checks a second time.
 *
 * For isochronous transfers where a payload is always transferred in a single
 * URB, the three functions will be called in a row.
 *
 * To let the decoder process header data and update its internal state even
 * when no video buffer is available, uvc_video_decode_start must be prepared
 * to be called with a NULL buf parameter. uvc_video_decode_data and
 * uvc_video_decode_end will never be called with a NULL buffer.
 */
static int uvc_video_decode_start(struct uvc_streaming *stream,
        struct uvc_buffer *buf, const __u8 *data, int len)
{
    __u8 fid;

    /* Sanity checks:
     * - packet must be at least 2 bytes long
     * - bHeaderLength value must be at least 2 bytes (see above)
     * - bHeaderLength value can't be larger than the packet size.
     */
    if (len < 2 || data[0] < 2 || data[0] > len) {
        stream->stats.frame.nb_invalid++;
        return -EINVAL;
    }

    fid = data[1] & UVC_STREAM_FID;

    /* Increase the sequence number regardless of any buffer states, so
     * that discontinuous sequence numbers always indicate lost frames.
     */
    if (stream->last_fid != fid) {
        stream->sequence++;
        if (stream->sequence)
            uvc_video_stats_update(stream);
    }

    uvc_video_clock_decode(stream, buf, data, len);
    uvc_video_stats_decode(stream, data, len);

    /* Store the payload FID bit and return immediately when the buffer is
     * NULL.
     */
    if (buf == NULL) {
        stream->last_fid = fid;
        return -ENODATA;
    }

    /* Mark the buffer as bad if the error bit is set. */
    if (data[1] & UVC_STREAM_ERR) {
        uvc_trace(UVC_TRACE_FRAME, "Marking buffer as bad (error bit "
              "set).\n");
        buf->error = 1;
    }

    /* Synchronize to the input stream by waiting for the FID bit to be
     * toggled when the buffer state is not UVC_BUF_STATE_ACTIVE.
     * stream->last_fid is initialized to -1, so the first isochronous
     * frame will always be in sync.
     *
     * If the device doesn't toggle the FID bit, invert stream->last_fid
     * when the EOF bit is set to force synchronisation on the next packet.
     */
    if (buf->state != UVC_BUF_STATE_ACTIVE) {
        struct timespec ts;

        if (fid == stream->last_fid) {
            uvc_trace(UVC_TRACE_FRAME, "Dropping payload (out of "
                "sync).\n");
            if ((stream->dev->quirks & UVC_QUIRK_STREAM_NO_FID) &&
                (data[1] & UVC_STREAM_EOF))
                stream->last_fid ^= UVC_STREAM_FID;
            return -ENODATA;
        }

        uvc_video_get_ts(&ts);

        buf->buf.field = V4L2_FIELD_NONE;
        buf->buf.sequence = stream->sequence;
        buf->buf.timestamp.tv_sec = ts.tv_sec;
        buf->buf.timestamp.tv_usec =
            ts.tv_nsec / NSEC_PER_USEC;

        /* TODO: Handle PTS and SCR. */
        buf->state = UVC_BUF_STATE_ACTIVE;
    }

    /* Mark the buffer as done if we're at the beginning of a new frame.
     * End of frame detection is better implemented by checking the EOF
     * bit (FID bit toggling is delayed by one frame compared to the EOF
     * bit), but some devices don't set the bit at end of frame (and the
     * last payload can be lost anyway). We thus must check if the FID has
     * been toggled.
     *
     * stream->last_fid is initialized to -1, so the first isochronous
     * frame will never trigger an end of frame detection.
     *
     * Empty buffers (bytesused == 0) don't trigger end of frame detection
     * as it doesn't make sense to return an empty buffer. This also
     * avoids detecting end of frame conditions at FID toggling if the
     * previous payload had the EOF bit set.
     */
    if (fid != stream->last_fid && buf->bytesused != 0) {
        uvc_trace(UVC_TRACE_FRAME, "Frame complete (FID bit "
                "toggled).\n");
        buf->state = UVC_BUF_STATE_READY;
        return -EAGAIN;
    }

    stream->last_fid = fid;

    return data[0];
}

static void uvc_video_decode_data(struct uvc_streaming *stream,
        struct uvc_buffer *buf, const __u8 *data, int len)
{
    unsigned int maxlen, nbytes;
    void *mem;

    if (len <= 0)
        return;

    /* Copy the video data to the buffer. */
    maxlen = buf->length - buf->bytesused;
    mem = buf->mem + buf->bytesused;
    nbytes = min((unsigned int)len, maxlen);
    memcpy(mem, data, nbytes);
    buf->bytesused += nbytes;

    /* Complete the current frame if the buffer size was exceeded. */
    if (len > maxlen) {
        uvc_trace(UVC_TRACE_FRAME, "Frame complete (overflow).\n");
        buf->state = UVC_BUF_STATE_READY;
    }
}

static void uvc_video_decode_end(struct uvc_streaming *stream,
        struct uvc_buffer *buf, const __u8 *data, int len)
{
    /* Mark the buffer as done if the EOF marker is set. */
    if (data[1] & UVC_STREAM_EOF && buf->bytesused != 0) {
        uvc_trace(UVC_TRACE_FRAME, "Frame complete (EOF found).\n");
        if (data[0] == len)
            uvc_trace(UVC_TRACE_FRAME, "EOF in empty payload.\n");
        buf->state = UVC_BUF_STATE_READY;
        if (stream->dev->quirks & UVC_QUIRK_STREAM_NO_FID)
            stream->last_fid ^= UVC_STREAM_FID;
    }
}

/* Video payload encoding is handled by uvc_video_encode_header() and
 * uvc_video_encode_data(). Only bulk transfers are currently supported.
 *
 * uvc_video_encode_header is called at the start of a payload. It adds header
 * data to the transfer buffer and returns the header size. As the only known
 * UVC output device transfers a whole frame in a single payload, the EOF bit
 * is always set in the header.
 *
 * uvc_video_encode_data is called for every URB and copies the data from the
 * video buffer to the transfer buffer.
 */
static int uvc_video_encode_header(struct uvc_streaming *stream,
        struct uvc_buffer *buf, __u8 *data, int len)
{
    data[0] = 2;    /* Header length */
    data[1] = UVC_STREAM_EOH | UVC_STREAM_EOF
        | (stream->last_fid & UVC_STREAM_FID);
    return 2;
}
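
/* Illustrative example (assuming the usual UVC payload header bit layout,
 * with UVC_STREAM_EOH = 1 << 7, UVC_STREAM_EOF = 1 << 1 and
 * UVC_STREAM_FID = 1 << 0): for a frame sent with last_fid set, the two
 * header bytes built above would be
 *
 *     data[0] = 0x02   (bHeaderLength)
 *     data[1] = 0x83   (EOH | EOF | FID)
 *
 * and 0x82 for the next frame once the FID bit has been toggled.
 */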
  1135.  
  1136. static int uvc_video_encode_data(struct uvc_streaming *stream,
  1137.         struct uvc_buffer *buf, __u8 *data, int len)
  1138. {
  1139.     struct uvc_video_queue *queue = &stream->queue;
  1140.     unsigned int nbytes;
  1141.     void *mem;
  1142.  
  1143.     /* Copy video data to the URB buffer. */
  1144.     mem = buf->mem + queue->buf_used;
  1145.     nbytes = min((unsigned int)len, buf->bytesused - queue->buf_used);
  1146.     nbytes = min(stream->bulk.max_payload_size - stream->bulk.payload_size,
  1147.             nbytes);
  1148.     memcpy(data, mem, nbytes);
  1149.  
  1150.     queue->buf_used += nbytes;
  1151.  
  1152.     return nbytes;
  1153. }
  1154.  
  1155. /* ------------------------------------------------------------------------
  1156.  * URB handling
  1157.  */
  1158.  
  1159. /*
  1160.  * Set error flag for incomplete buffer.
  1161.  */
  1162. static void uvc_video_validate_buffer(const struct uvc_streaming *stream,
  1163.                       struct uvc_buffer *buf)
  1164. {
  1165.     if (stream->ctrl.dwMaxVideoFrameSize != buf->bytesused &&
  1166.         !(stream->cur_format->flags & UVC_FMT_FLAG_COMPRESSED))
  1167.         buf->error = 1;
  1168. }
  1169.  
  1170. /*
  1171.  * Completion handler for video URBs.
  1172.  */
  1173. static void uvc_video_decode_isoc(struct urb *urb, struct uvc_streaming *stream,
  1174.     struct uvc_buffer *buf)
  1175. {
  1176.     u8 *mem;
  1177.     int ret, i;
  1178.  
  1179.     for (i = 0; i < urb->number_of_packets; ++i) {
  1180.         if (urb->iso_frame_desc[i].status < 0) {
  1181.             uvc_trace(UVC_TRACE_FRAME, "USB isochronous frame "
  1182.                 "lost (%d).\n", urb->iso_frame_desc[i].status);
  1183.             /* Mark the buffer as faulty. */
  1184.             if (buf != NULL)
  1185.                 buf->error = 1;
  1186.             continue;
  1187.         }
  1188.  
  1189.         /* Decode the payload header. */
  1190.         mem = urb->transfer_buffer + urb->iso_frame_desc[i].offset;
  1191.         do {
  1192.             ret = uvc_video_decode_start(stream, buf, mem,
  1193.                 urb->iso_frame_desc[i].actual_length);
  1194.             if (ret == -EAGAIN) {
  1195.                 uvc_video_validate_buffer(stream, buf);
  1196.                 buf = uvc_queue_next_buffer(&stream->queue,
  1197.                                 buf);
  1198.             }
  1199.         } while (ret == -EAGAIN);
  1200.  
  1201.         if (ret < 0)
  1202.             continue;
  1203.  
  1204.         /* Decode the payload data. */
  1205.         uvc_video_decode_data(stream, buf, mem + ret,
  1206.             urb->iso_frame_desc[i].actual_length - ret);
  1207.  
  1208.         /* Process the header again. */
  1209.         uvc_video_decode_end(stream, buf, mem,
  1210.             urb->iso_frame_desc[i].actual_length);
  1211.  
  1212.         if (buf->state == UVC_BUF_STATE_READY) {
  1213.             uvc_video_validate_buffer(stream, buf);
  1214.             buf = uvc_queue_next_buffer(&stream->queue, buf);
  1215.         }
  1216.     }
  1217. }
  1218.  
  1219. static void uvc_video_decode_bulk(struct urb *urb, struct uvc_streaming *stream,
  1220.     struct uvc_buffer *buf)
  1221. {
  1222.     u8 *mem;
  1223.     int len, ret;
  1224.  
  1225.     /*
  1226.      * Ignore ZLPs if they're not part of a frame, otherwise process them
  1227.      * to trigger the end of payload detection.
  1228.      */
  1229.     if (urb->actual_length == 0 && stream->bulk.header_size == 0)
  1230.         return;
  1231.  
  1232.     mem = urb->transfer_buffer;
  1233.     len = urb->actual_length;
  1234.     stream->bulk.payload_size += len;
  1235.  
  1236.     /* If the URB is the first of its payload, decode and save the
  1237.      * header.
  1238.      */
  1239.     if (stream->bulk.header_size == 0 && !stream->bulk.skip_payload) {
  1240.         do {
  1241.             ret = uvc_video_decode_start(stream, buf, mem, len);
  1242.             if (ret == -EAGAIN)
  1243.                 buf = uvc_queue_next_buffer(&stream->queue,
  1244.                                 buf);
  1245.         } while (ret == -EAGAIN);
  1246.  
  1247.         /* If an error occurred skip the rest of the payload. */
  1248.         if (ret < 0 || buf == NULL) {
  1249.             stream->bulk.skip_payload = 1;
  1250.         } else {
  1251.             memcpy(stream->bulk.header, mem, ret);
  1252.             stream->bulk.header_size = ret;
  1253.  
  1254.             mem += ret;
  1255.             len -= ret;
  1256.         }
  1257.     }
  1258.  
  1259.     /* The buffer queue might have been cancelled while a bulk transfer
  1260.      * was in progress, so we can reach here with buf equal to NULL. Make
  1261.      * sure buf is never dereferenced if NULL.
  1262.      */
  1263.  
  1264.     /* Process video data. */
  1265.     if (!stream->bulk.skip_payload && buf != NULL)
  1266.         uvc_video_decode_data(stream, buf, mem, len);
  1267.  
  1268.     /* Detect the payload end by a URB smaller than the maximum size (or
  1269.      * a payload size equal to the maximum) and process the header again.
  1270.      */
  1271.     if (urb->actual_length < urb->transfer_buffer_length ||
  1272.         stream->bulk.payload_size >= stream->bulk.max_payload_size) {
  1273.         if (!stream->bulk.skip_payload && buf != NULL) {
  1274.             uvc_video_decode_end(stream, buf, stream->bulk.header,
  1275.                 stream->bulk.payload_size);
  1276.             if (buf->state == UVC_BUF_STATE_READY)
  1277.                 buf = uvc_queue_next_buffer(&stream->queue,
  1278.                                 buf);
  1279.         }
  1280.  
  1281.         stream->bulk.header_size = 0;
  1282.         stream->bulk.skip_payload = 0;
  1283.         stream->bulk.payload_size = 0;
  1284.     }
  1285. }
  1286.  
  1287. static void uvc_video_encode_bulk(struct urb *urb, struct uvc_streaming *stream,
  1288.     struct uvc_buffer *buf)
  1289. {
  1290.     u8 *mem = urb->transfer_buffer;
  1291.     int len = stream->urb_size, ret;
  1292.  
  1293.     if (buf == NULL) {
  1294.         urb->transfer_buffer_length = 0;
  1295.         return;
  1296.     }
  1297.  
  1298.     /* If the URB is the first of its payload, add the header. */
  1299.     if (stream->bulk.header_size == 0) {
  1300.         ret = uvc_video_encode_header(stream, buf, mem, len);
  1301.         stream->bulk.header_size = ret;
  1302.         stream->bulk.payload_size += ret;
  1303.         mem += ret;
  1304.         len -= ret;
  1305.     }
  1306.  
  1307.     /* Process video data. */
  1308.     ret = uvc_video_encode_data(stream, buf, mem, len);
  1309.  
  1310.     stream->bulk.payload_size += ret;
  1311.     len -= ret;
  1312.  
  1313.     if (buf->bytesused == stream->queue.buf_used ||
  1314.         stream->bulk.payload_size == stream->bulk.max_payload_size) {
  1315.         if (buf->bytesused == stream->queue.buf_used) {
  1316.             stream->queue.buf_used = 0;
  1317.             buf->state = UVC_BUF_STATE_READY;
  1318.             buf->buf.sequence = ++stream->sequence;
  1319.             uvc_queue_next_buffer(&stream->queue, buf);
  1320.             stream->last_fid ^= UVC_STREAM_FID;
  1321.         }
  1322.  
  1323.         stream->bulk.header_size = 0;
  1324.         stream->bulk.payload_size = 0;
  1325.     }
  1326.  
  1327.     urb->transfer_buffer_length = stream->urb_size - len;
  1328. }
  1329.  
  1330. static void uvc_video_complete(struct urb *urb)
  1331. {
  1332.     struct uvc_streaming *stream = urb->context;
  1333.     struct uvc_video_queue *queue = &stream->queue;
  1334.     struct uvc_buffer *buf = NULL;
  1335.     unsigned long flags;
  1336.     int ret;
  1337.  
  1338.     switch (urb->status) {
  1339.     case 0:
  1340.         break;
  1341.  
  1342.     default:
  1343.         uvc_printk(KERN_WARNING, "Non-zero status (%d) in video "
  1344.             "completion handler.\n", urb->status);
  1345.  
  1346.     case -ENOENT:       /* usb_kill_urb() called. */
  1347.         if (stream->frozen)
  1348.             return;
  1349.  
  1350.     case -ECONNRESET:   /* usb_unlink_urb() called. */
  1351.     case -ESHUTDOWN:    /* The endpoint is being disabled. */
  1352.         uvc_queue_cancel(queue, urb->status == -ESHUTDOWN);
  1353.         return;
  1354.     }
  1355.  
  1356.     spin_lock_irqsave(&queue->irqlock, flags);
  1357.     if (!list_empty(&queue->irqqueue))
  1358.         buf = list_first_entry(&queue->irqqueue, struct uvc_buffer,
  1359.                        queue);
  1360.     spin_unlock_irqrestore(&queue->irqlock, flags);
  1361.  
  1362.     stream->decode(urb, stream, buf);
  1363.  
  1364.     if ((ret = usb_submit_urb(urb, GFP_ATOMIC)) < 0) {
  1365.         uvc_printk(KERN_ERR, "Failed to resubmit video URB (%d).\n",
  1366.             ret);
  1367.     }
  1368. }
  1369.  
  1370. /*
  1371.  * Free transfer buffers.
  1372.  */
  1373. static void uvc_free_urb_buffers(struct uvc_streaming *stream)
  1374. {
  1375.     unsigned int i;
  1376.  
  1377.     for (i = 0; i < UVC_URBS; ++i) {
  1378.         if (stream->urb_buffer[i]) {
  1379. #ifndef CONFIG_DMA_NONCOHERENT
  1380.             usb_free_coherent(stream->dev->udev, stream->urb_size,
  1381.                 stream->urb_buffer[i], stream->urb_dma[i]);
  1382. #else
  1383.             kfree(stream->urb_buffer[i]);
  1384. #endif
  1385.             stream->urb_buffer[i] = NULL;
  1386.         }
  1387.     }
  1388.  
  1389.     stream->urb_size = 0;
  1390. }
  1391.  
  1392. /*
  1393.  * Allocate transfer buffers. This function can be called with buffers
  1394.  * already allocated when resuming from suspend, in which case it will
  1395.  * return without touching the buffers.
  1396.  *
  1397.  * Limit the buffer size to UVC_MAX_PACKETS bulk/isochronous packets. If the
  1398.  * system is too low on memory, try successively smaller numbers of packets
  1399.  * until allocation succeeds.
  1400.  *
  1401.  * Return the number of allocated packets on success or 0 when out of memory.
  1402.  */
  1403. static int uvc_alloc_urb_buffers(struct uvc_streaming *stream,
  1404.     unsigned int size, unsigned int psize, gfp_t gfp_flags)
  1405. {
  1406.     unsigned int npackets;
  1407.     unsigned int i;
  1408.  
  1409.     /* Buffers are already allocated, bail out. */
  1410.     if (stream->urb_size)
  1411.         return stream->urb_size / psize;
  1412.  
  1413.     /* Compute the number of packets. Bulk endpoints might transfer UVC
  1414.      * payloads across multiple URBs.
  1415.      */
  1416.     npackets = DIV_ROUND_UP(size, psize);
  1417.     if (npackets > UVC_MAX_PACKETS)
  1418.         npackets = UVC_MAX_PACKETS;
  1419.  
  1420.     /* Retry allocations until one succeeds. */
  1421.     for (; npackets > 1; npackets /= 2) {
  1422.         for (i = 0; i < UVC_URBS; ++i) {
  1423.             stream->urb_size = psize * npackets;
  1424. #ifndef CONFIG_DMA_NONCOHERENT
  1425.             stream->urb_buffer[i] = usb_alloc_coherent(
  1426.                 stream->dev->udev, stream->urb_size,
  1427.                 gfp_flags | __GFP_NOWARN, &stream->urb_dma[i]);
  1428. #else
  1429.             stream->urb_buffer[i] =
  1430.                 kmalloc(stream->urb_size, gfp_flags | __GFP_NOWARN);
  1431. #endif
  1432.             if (!stream->urb_buffer[i]) {
  1433.                 uvc_free_urb_buffers(stream);
  1434.                 break;
  1435.             }
  1436.         }
  1437.  
  1438.         if (i == UVC_URBS) {
  1439.             uvc_trace(UVC_TRACE_VIDEO, "Allocated %u URB buffers "
  1440.                 "of %ux%u bytes each.\n", UVC_URBS, npackets,
  1441.                 psize);
  1442.             return npackets;
  1443.         }
  1444.     }
  1445.  
  1446.     uvc_trace(UVC_TRACE_VIDEO, "Failed to allocate URB buffers (%u bytes "
  1447.         "per packet).\n", psize);
  1448.     return 0;
  1449. }
  1450.  
  1451. /*
  1452.  * Uninitialize isochronous/bulk URBs and free transfer buffers.
  1453.  */
  1454. static void uvc_uninit_video(struct uvc_streaming *stream, int free_buffers)
  1455. {
  1456.     struct urb *urb;
  1457.     unsigned int i;
  1458.  
  1459.     uvc_video_stats_stop(stream);
  1460.  
  1461.     for (i = 0; i < UVC_URBS; ++i) {
  1462.         urb = stream->urb[i];
  1463.         if (urb == NULL)
  1464.             continue;
  1465.  
  1466.         usb_kill_urb(urb);
  1467.         usb_free_urb(urb);
  1468.         stream->urb[i] = NULL;
  1469.     }
  1470.  
  1471.     if (free_buffers)
  1472.         uvc_free_urb_buffers(stream);
  1473. }
  1474.  
  1475. /*
  1476.  * Compute the maximum number of bytes per interval for an endpoint.
  1477.  */
  1478. static unsigned int uvc_endpoint_max_bpi(struct usb_device *dev,
  1479.                      struct usb_host_endpoint *ep)
  1480. {
  1481.     u16 psize;
  1482.  
  1483.     switch (dev->speed) {
  1484.     case USB_SPEED_SUPER:
  1485.         return le16_to_cpu(ep->ss_ep_comp.wBytesPerInterval);
  1486.     case USB_SPEED_HIGH:
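                /* Bits 10:0 of wMaxPacketSize give the packet size, bits 12:11
                 * the number of additional transactions per microframe.
                 */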
  1487.         psize = usb_endpoint_maxp(&ep->desc);
  1488.         return (psize & 0x07ff) * (1 + ((psize >> 11) & 3));
  1489.     case USB_SPEED_WIRELESS:
  1490.         psize = usb_endpoint_maxp(&ep->desc);
  1491.         return psize;
  1492.     default:
  1493.         psize = usb_endpoint_maxp(&ep->desc);
  1494.         return psize & 0x07ff;
  1495.     }
  1496. }
  1497.  
  1498. /*
  1499.  * Initialize isochronous URBs and allocate transfer buffers. The packet size
  1500.  * is given by the endpoint.
  1501.  */
  1502. static int uvc_init_video_isoc(struct uvc_streaming *stream,
  1503.     struct usb_host_endpoint *ep, gfp_t gfp_flags)
  1504. {
  1505.     struct urb *urb;
  1506.     unsigned int npackets, i, j;
  1507.     u16 psize;
  1508.     u32 size;
  1509.  
  1510.     psize = uvc_endpoint_max_bpi(stream->dev->udev, ep);
  1511.     size = stream->ctrl.dwMaxVideoFrameSize;
  1512.  
  1513.     npackets = uvc_alloc_urb_buffers(stream, size, psize, gfp_flags);
  1514.     if (npackets == 0)
  1515.         return -ENOMEM;
  1516.  
  1517.     size = npackets * psize;
  1518.  
  1519.     for (i = 0; i < UVC_URBS; ++i) {
  1520.         urb = usb_alloc_urb(npackets, gfp_flags);
  1521.         if (urb == NULL) {
  1522.             uvc_uninit_video(stream, 1);
  1523.             return -ENOMEM;
  1524.         }
  1525.  
  1526.         urb->dev = stream->dev->udev;
  1527.         urb->context = stream;
  1528.         urb->pipe = usb_rcvisocpipe(stream->dev->udev,
  1529.                 ep->desc.bEndpointAddress);
  1530. #ifndef CONFIG_DMA_NONCOHERENT
  1531.         urb->transfer_flags = URB_ISO_ASAP | URB_NO_TRANSFER_DMA_MAP;
  1532.         urb->transfer_dma = stream->urb_dma[i];
  1533. #else
  1534.         urb->transfer_flags = URB_ISO_ASAP;
  1535. #endif
  1536.         urb->interval = ep->desc.bInterval;
  1537.         urb->transfer_buffer = stream->urb_buffer[i];
  1538.         urb->complete = uvc_video_complete;
  1539.         urb->number_of_packets = npackets;
  1540.         urb->transfer_buffer_length = size;
  1541.  
  1542.         for (j = 0; j < npackets; ++j) {
  1543.             urb->iso_frame_desc[j].offset = j * psize;
  1544.             urb->iso_frame_desc[j].length = psize;
  1545.         }
  1546.  
  1547.         stream->urb[i] = urb;
  1548.     }
  1549.  
  1550.     return 0;
  1551. }
  1552.  
  1553. /*
  1554.  * Initialize bulk URBs and allocate transfer buffers. The packet size is
  1555.  * given by the endpoint.
  1556.  */
  1557. static int uvc_init_video_bulk(struct uvc_streaming *stream,
  1558.     struct usb_host_endpoint *ep, gfp_t gfp_flags)
  1559. {
  1560.     struct urb *urb;
  1561.     unsigned int npackets, pipe, i;
  1562.     u16 psize;
  1563.     u32 size;
  1564.  
  1565.     psize = usb_endpoint_maxp(&ep->desc) & 0x7ff;
  1566.     size = stream->ctrl.dwMaxPayloadTransferSize;
  1567.     stream->bulk.max_payload_size = size;
  1568.  
  1569.     npackets = uvc_alloc_urb_buffers(stream, size, psize, gfp_flags);
  1570.     if (npackets == 0)
  1571.         return -ENOMEM;
  1572.  
  1573.     size = npackets * psize;
  1574.  
  1575.     if (usb_endpoint_dir_in(&ep->desc))
  1576.         pipe = usb_rcvbulkpipe(stream->dev->udev,
  1577.                        ep->desc.bEndpointAddress);
  1578.     else
  1579.         pipe = usb_sndbulkpipe(stream->dev->udev,
  1580.                        ep->desc.bEndpointAddress);
  1581.  
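            /* For video output streams the transfer length is set for each
             * payload by the encode handler, so start with an empty URB.
             */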
  1582.     if (stream->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
  1583.         size = 0;
  1584.  
  1585.     for (i = 0; i < UVC_URBS; ++i) {
  1586.         urb = usb_alloc_urb(0, gfp_flags);
  1587.         if (urb == NULL) {
  1588.             uvc_uninit_video(stream, 1);
  1589.             return -ENOMEM;
  1590.         }
  1591.  
  1592.         usb_fill_bulk_urb(urb, stream->dev->udev, pipe,
  1593.             stream->urb_buffer[i], size, uvc_video_complete,
  1594.             stream);
  1595. #ifndef CONFIG_DMA_NONCOHERENT
  1596.         urb->transfer_flags = URB_NO_TRANSFER_DMA_MAP;
  1597.         urb->transfer_dma = stream->urb_dma[i];
  1598. #endif
  1599.  
  1600.         stream->urb[i] = urb;
  1601.     }
  1602.  
  1603.     return 0;
  1604. }
  1605.  
  1606. /*
  1607.  * Initialize isochronous/bulk URBs and allocate transfer buffers.
  1608.  */
  1609. static int uvc_init_video(struct uvc_streaming *stream, gfp_t gfp_flags)
  1610. {
  1611.     struct usb_interface *intf = stream->intf;
  1612.     struct usb_host_endpoint *ep;
  1613.     unsigned int i;
  1614.     int ret;
  1615.  
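            /* Reset the stream state before (re)starting the transfers. */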
  1616.     stream->sequence = -1;
  1617.     stream->last_fid = -1;
  1618.     stream->bulk.header_size = 0;
  1619.     stream->bulk.skip_payload = 0;
  1620.     stream->bulk.payload_size = 0;
  1621.  
  1622.     uvc_video_stats_start(stream);
  1623.  
  1624.     if (intf->num_altsetting > 1) {
  1625.         struct usb_host_endpoint *best_ep = NULL;
  1626.         unsigned int best_psize = UINT_MAX;
  1627.         unsigned int bandwidth;
  1628.         unsigned int uninitialized_var(altsetting);
  1629.         int intfnum = stream->intfnum;
  1630.  
  1631.         /* Isochronous endpoint, select the alternate setting. */
  1632.         bandwidth = stream->ctrl.dwMaxPayloadTransferSize;
  1633.  
  1634.         if (bandwidth == 0) {
  1635.             uvc_trace(UVC_TRACE_VIDEO, "Device requested null "
  1636.                 "bandwidth, defaulting to lowest.\n");
  1637.             bandwidth = 1;
  1638.         } else {
  1639.             uvc_trace(UVC_TRACE_VIDEO, "Device requested %u "
  1640.                 "B/frame bandwidth.\n", bandwidth);
  1641.         }
  1642.  
  1643.         for (i = 0; i < intf->num_altsetting; ++i) {
  1644.             struct usb_host_interface *alts;
  1645.             unsigned int psize;
  1646.  
  1647.             alts = &intf->altsetting[i];
  1648.             ep = uvc_find_endpoint(alts,
  1649.                 stream->header.bEndpointAddress);
  1650.             if (ep == NULL)
  1651.                 continue;
  1652.  
  1653.             /* Check if the bandwidth is high enough. */
  1654.             psize = uvc_endpoint_max_bpi(stream->dev->udev, ep);
  1655.             if (psize >= bandwidth && psize <= best_psize) {
  1656.                 altsetting = alts->desc.bAlternateSetting;
  1657.                 best_psize = psize;
  1658.                 best_ep = ep;
  1659.             }
  1660.         }
  1661.  
  1662.         if (best_ep == NULL) {
  1663.             uvc_trace(UVC_TRACE_VIDEO, "No fast enough alt setting "
  1664.                 "for requested bandwidth.\n");
  1665.             return -EIO;
  1666.         }
  1667.  
  1668.         uvc_trace(UVC_TRACE_VIDEO, "Selecting alternate setting %u "
  1669.             "(%u B/frame bandwidth).\n", altsetting, best_psize);
  1670.  
  1671.         ret = usb_set_interface(stream->dev->udev, intfnum, altsetting);
  1672.         if (ret < 0)
  1673.             return ret;
  1674.  
  1675.         ret = uvc_init_video_isoc(stream, best_ep, gfp_flags);
  1676.     } else {
  1677.         /* Bulk endpoint, proceed to URB initialization. */
  1678.         ep = uvc_find_endpoint(&intf->altsetting[0],
  1679.                 stream->header.bEndpointAddress);
  1680.         if (ep == NULL)
  1681.             return -EIO;
  1682.  
  1683.         ret = uvc_init_video_bulk(stream, ep, gfp_flags);
  1684.     }
  1685.  
  1686.     if (ret < 0)
  1687.         return ret;
  1688.  
  1689.     /* Submit the URBs. */
  1690.     for (i = 0; i < UVC_URBS; ++i) {
  1691.         ret = usb_submit_urb(stream->urb[i], gfp_flags);
  1692.         if (ret < 0) {
  1693.             uvc_printk(KERN_ERR, "Failed to submit URB %u "
  1694.                     "(%d).\n", i, ret);
  1695.             uvc_uninit_video(stream, 1);
  1696.             return ret;
  1697.         }
  1698.     }
  1699.  
  1700.     /* The Logitech C920 temporarily forgets that it should not be adjusting
  1701.      * Exposure Absolute during init, so restore controls to stored values.
  1702.      */
  1703.     if (stream->dev->quirks & UVC_QUIRK_RESTORE_CTRLS_ON_INIT)
  1704.         uvc_ctrl_restore_values(stream->dev);
  1705.  
  1706.     return 0;
  1707. }
  1708.  
  1709. /* --------------------------------------------------------------------------
  1710.  * Suspend/resume
  1711.  */
  1712.  
  1713. /*
  1714.  * Stop streaming without disabling the video queue.
  1715.  *
  1716.  * To let userspace applications resume without trouble, we must not touch the
  1717.  * video buffers in any way. We mark the device as frozen to make sure the URB
  1718.  * completion handler won't try to cancel the queue when we kill the URBs.
  1719.  */
  1720. int uvc_video_suspend(struct uvc_streaming *stream)
  1721. {
  1722.     if (!uvc_queue_streaming(&stream->queue))
  1723.         return 0;
  1724.  
  1725.     stream->frozen = 1;
  1726.     uvc_uninit_video(stream, 0);
  1727.     usb_set_interface(stream->dev->udev, stream->intfnum, 0);
  1728.     return 0;
  1729. }
  1730.  
  1731. /*
  1732.  * Reconfigure the video interface and restart streaming if it was enabled
  1733.  * before suspend.
  1734.  *
  1735.  * If an error occurs, disable the video queue. This will wake all pending
  1736.  * buffers, making sure userspace applications are notified of the problem
  1737.  * instead of waiting forever.
  1738.  */
  1739. int uvc_video_resume(struct uvc_streaming *stream, int reset)
  1740. {
  1741.     int ret;
  1742.  
  1743.     /* If the bus has been reset on resume, set the alternate setting to 0.
  1744.      * This should be the default value, but some devices crash or otherwise
  1745.      * misbehave if they don't receive a SET_INTERFACE request before any
  1746.      * other video control request.
  1747.      */
  1748.     if (reset)
  1749.         usb_set_interface(stream->dev->udev, stream->intfnum, 0);
  1750.  
  1751.     stream->frozen = 0;
  1752.  
  1753.     uvc_video_clock_reset(stream);
  1754.  
  1755.     if (!uvc_queue_streaming(&stream->queue))
  1756.         return 0;
  1757.  
  1758.     ret = uvc_commit_video(stream, &stream->ctrl);
  1759.     if (ret < 0)
  1760.         return ret;
  1761.  
  1762.     return uvc_init_video(stream, GFP_NOIO);
  1763. }
  1764.  
  1765. /* ------------------------------------------------------------------------
  1766.  * Video device
  1767.  */
  1768.  
  1769. /*
  1770.  * Initialize the UVC video device by switching to alternate setting 0 and
  1771.  * retrieving the default format.
  1772.  *
  1773.  * Some cameras (namely the Fuji Finepix) set the format and frame
  1774.  * indexes to zero. The UVC standard doesn't clearly make this a spec
  1775.  * violation, so try to silently fix the values if possible.
  1776.  *
  1777.  * This function is called before registering the device with V4L.
  1778.  */
  1779. int uvc_video_init(struct uvc_streaming *stream)
  1780. {
  1781.     struct uvc_streaming_control *probe = &stream->ctrl;
  1782.     struct uvc_format *format = NULL;
  1783.     struct uvc_frame *frame = NULL;
  1784.     unsigned int i;
  1785.     int ret;
  1786.  
  1787.     if (stream->nformats == 0) {
  1788.         uvc_printk(KERN_INFO, "No supported video formats found.\n");
  1789.         return -EINVAL;
  1790.     }
  1791.  
  1792.     atomic_set(&stream->active, 0);
  1793.  
  1794.     /* Alternate setting 0 should be the default, yet the XBox Live Vision
  1795.      * Cam (and possibly other devices) crash or otherwise misbehave if
  1796.      * they don't receive a SET_INTERFACE request before any other video
  1797.      * control request.
  1798.      */
  1799.     usb_set_interface(stream->dev->udev, stream->intfnum, 0);
  1800.  
  1801.     /* Set the streaming probe control with default streaming parameters
  1802.      * retrieved from the device. Webcams that don't support GET_DEF
  1803.      * requests on the probe control will just keep their current streaming
  1804.      * parameters.
  1805.      */
  1806.     if (uvc_get_video_ctrl(stream, probe, 1, UVC_GET_DEF) == 0)
  1807.         uvc_set_video_ctrl(stream, probe, 1);
  1808.  
  1809.     /* Initialize the streaming parameters with the probe control current
  1810.      * value. This makes sure SET_CUR requests on the streaming commit
  1811.      * control will always use values retrieved from a successful GET_CUR
  1812.      * request on the probe control, as required by the UVC specification.
  1813.      */
  1814.     ret = uvc_get_video_ctrl(stream, probe, 1, UVC_GET_CUR);
  1815.     if (ret < 0)
  1816.         return ret;
  1817.  
  1818.     /* Check if the default format descriptor exists. Use the first
  1819.      * available format otherwise.
  1820.      */
  1821.     for (i = stream->nformats; i > 0; --i) {
  1822.         format = &stream->format[i-1];
  1823.         if (format->index == probe->bFormatIndex)
  1824.             break;
  1825.     }
  1826.  
  1827.     if (format->nframes == 0) {
  1828.         uvc_printk(KERN_INFO, "No frame descriptor found for the "
  1829.             "default format.\n");
  1830.         return -EINVAL;
  1831.     }
  1832.  
  1833.     /* Zero bFrameIndex might be correct. Stream-based formats (including
  1834.      * MPEG-2 TS and DV) do not support frames but have a dummy frame
  1835.      * descriptor with bFrameIndex set to zero. If the default frame
  1836.      * descriptor is not found, use the first available frame.
  1837.      */
  1838.     for (i = format->nframes; i > 0; --i) {
  1839.         frame = &format->frame[i-1];
  1840.         if (frame->bFrameIndex == probe->bFrameIndex)
  1841.             break;
  1842.     }
  1843.  
  1844.     probe->bFormatIndex = format->index;
  1845.     probe->bFrameIndex = frame->bFrameIndex;
  1846.  
  1847.     stream->def_format = format;
  1848.     stream->cur_format = format;
  1849.     stream->cur_frame = frame;
  1850.  
  1851.     /* Select the video decoding function */
  1852.     if (stream->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
  1853.         if (stream->dev->quirks & UVC_QUIRK_BUILTIN_ISIGHT)
  1854.             stream->decode = uvc_video_decode_isight;
  1855.         else if (stream->intf->num_altsetting > 1)
  1856.             stream->decode = uvc_video_decode_isoc;
  1857.         else
  1858.             stream->decode = uvc_video_decode_bulk;
  1859.     } else {
  1860.         if (stream->intf->num_altsetting == 1)
  1861.             stream->decode = uvc_video_encode_bulk;
  1862.         else {
  1863.             uvc_printk(KERN_INFO, "Isochronous endpoints are not "
  1864.                 "supported for video output devices.\n");
  1865.             return -EINVAL;
  1866.         }
  1867.     }
  1868.  
  1869.     return 0;
  1870. }
  1871.  
  1872. /*
  1873.  * Enable or disable the video stream.
  1874.  */
  1875. int uvc_video_enable(struct uvc_streaming *stream, int enable)
  1876. {
  1877.     int ret;
  1878.  
  1879.     if (!enable) {
  1880.         uvc_uninit_video(stream, 1);
  1881.         if (stream->intf->num_altsetting > 1) {
  1882.             usb_set_interface(stream->dev->udev,
  1883.                       stream->intfnum, 0);
  1884.         } else {
  1885.             /* UVC doesn't specify how to inform a bulk-based device
  1886.              * when the video stream is stopped. Windows sends a
  1887.              * CLEAR_FEATURE(HALT) request to the video streaming
  1888.              * bulk endpoint; mimic the same behaviour.
  1889.              */
  1890.             unsigned int epnum = stream->header.bEndpointAddress
  1891.                        & USB_ENDPOINT_NUMBER_MASK;
  1892.             unsigned int dir = stream->header.bEndpointAddress
  1893.                      & USB_ENDPOINT_DIR_MASK;
  1894.             unsigned int pipe;
  1895.  
  1896.             pipe = usb_sndbulkpipe(stream->dev->udev, epnum) | dir;
  1897.             usb_clear_halt(stream->dev->udev, pipe);
  1898.         }
  1899.  
  1900.         uvc_video_clock_cleanup(stream);
  1901.         return 0;
  1902.     }
  1903.  
  1904.     ret = uvc_video_clock_init(stream);
  1905.     if (ret < 0)
  1906.         return ret;
  1907.  
  1908.     /* Commit the streaming parameters. */
  1909.     ret = uvc_commit_video(stream, &stream->ctrl);
  1910.     if (ret < 0)
  1911.         goto error_commit;
  1912.  
  1913.     ret = uvc_init_video(stream, GFP_KERNEL);
  1914.     if (ret < 0)
  1915.         goto error_video;
  1916.  
  1917.     return 0;
  1918.  
  1919. error_video:
  1920.     usb_set_interface(stream->dev->udev, stream->intfnum, 0);
  1921. error_commit:
  1922.     uvc_video_clock_cleanup(stream);
  1923.  
  1924.     return ret;
  1925. }