unpak.sh

  1. #!/ffp/bin/sh
  2. # $Id$
  3. set -u  #Abort with unset variables
  4. set -e  #Abort with any error (can be suppressed locally using EITHER cmd||true OR set +e;cmd;set -e)
  5. #
  6. #
  7. # unpak.sh -
  8. #   nzbget post-processing script for the Popcorn Hour. Based on the one released
  9. # with the August 2008 firmware.
  10. #
  11. # The script only uses syntax/commands found on the Popcorn Hour (busybox/ash),
  12. # so not all commands are available (e.g. wc, tail) and some do not support GNU switches.
  13. # TODO Problem displaying '%' in nzbget.log test INFO and echo
  14. # TODO Don't copy flag files to RECENT
  15. # TODO Fix double title match.. eg nzb=blah s01e03 .. blah ..s01e03.nzb
  16.  
  17. #
  18. VERSION=20081009-BETA05
  19. #   Small bugfix detected rar parts.
  20. #VERSION=20081009-BETA04
  21. #   Small bugfix for extracting name from nfo
  22. #VERSION=20081009-BETA03
  23. #   Allow _partnnn (underscore)
  24. #VERSION=20081009-BETA02
  25. #   Also Get TV Name from NFO file if available.
  26. #   Small bug fixes.
  27. #VERSION=20081002-BETA01
  28. #   Added PIN:FOLDER 'hack' until Parental lock arrives.
  29. #   Auto Category looks at NZB name in preference to media names
  30. #   Added Recently Downloaded folders (using managed hard links)
  31. #   Added IMDB Movie categorisation.
  32. #   Diskspace check
  33. #   Checked unrar status 'All OK' in stdout.
  34. #   many bugfixes.
  35. #VERSION=20080911-01
  36. #   Option to pause for entire duration of script.
  37. #   Fixed MOVE_RAR_CONTENTS to use -e test rather than -f
  38. #   Fixed Par repair bug (failing to match par files to rar file)
  39. # VERSION=20080909-02
  40. #   Fixed MOVE_RAR_CONTENTS to use mv, checking for hidden files and avoiding glob failure.
  41. # VERSION=20080909-01
  42. #   Do a par repair if there are no rar files at all (using *.par2 not *PAR2) eg for mp3 folders.
  43. #   Fixed subtitle rar overwriting main rar if they have the same name.
  44. #   Autocategory for Music and simple TV series names.
  45. #   Join avi files if not joined by nzbget.
  46. # VERSION=20080905-03
  47. #   Minor Bug Fix - removed symlink to par2
  48. #VERSION=20080905-02
  49. #   Typo Bug Fix
  50. #VERSION=20080905-01
  51. #   Specify Alternative Completed location
  52. #   Log Estimate of time to Repair Pars and only do repairs that will be less than n minutes (configurable)
  53. #   Better logic to work with twin rar,par sets (eg cd1,cd2) where one rar works but the other needs pars.
  54. #   Better logic to work with missing start volumes.
  55. #   Stopped using hidden files as they prevent deleting via Remote Control
  56. #   Rar Parts are deleted right at the end of processing rather than during. This may help with pars that span multiple rar sets.
  57. #VERSION=20080902-01
  58. #   Better checks to ensure settings are consistent between nzbget.conf and unpak.sh.
  59. #   Copied logic used by nzbget to convert an NZB file name to the group/folder name.
  60. # v 20080901-02
  61. #   Bug fix - getting ids when there are square brackets or certain meta-characters in nzb name.
  62. # v 20080901-01
  63. #   Bug fixes. Settings verification.
  64. # v 20080831-04
  65. #  External Par Repair option
  66. # v 20080831-03
  67. #   Minor fixes.
  68. # v 20080831-01
  69. #   Sanity check if nzbget did not do any par processing.
  70. #   NZBGet , unrar paths set as options.
  71. #   Unpacking depth configurable.
  72. #   MediaCentre feature: HTML Logging for viewing in file browser mode.
  73. #   MediaCentre feature: Error Status via fake AVI file
  74. #   More bug fixes. (Rar Sanity Check)
  75. # v 20080828-03
  76. #   Added better test for ParCheck/_unbroken courtesy Hugbug.
  77. # v 20080828-02
  78. #   Fixed nested unrar bug.
  79. #   Added purging of old NZBs
  80. # v 20080828-01
  81. #   Does a quick sanity check on the rar file before unpacking.
  82. #   added IFS= to stop read command trimming white space.
  83. # v 20080827-02
  84. #   Fixed multiple attempts to unpack failed archives
  85. # v 20080827-01
  86. # - Delete files only if unrar is successful.
  87. # - Cope with multiple ts files in the same folder.
  88. # - Deleting is on by default - as it is more careful
  89. # --------------------------------------------------------------------
  90. # Copyright (C) 2008 Peter Roubos <peterroubos @ hotmail.com>
  91. # Copyright (C) 2008 Otmar Werner
  92. # Copyright (C) 2008 Andrei Prygounkov <hugbug @ users.sourceforge.net>
  93. # Copyright (C) 2008 Andrew Lord <nzbget @ lordy.org.uk>
  94. #
  95. # This program is free software; you can redistribute it and/or modify
  96. # it under the terms of the GNU General Public License as published by
  97. # the Free Software Foundation; either version 2 of the License, or
  98. # (at your option) any later version.
  99. #
  100. # This program is distributed in the hope that it will be useful,
  101. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  102. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  103. # GNU General Public License for more details.
  104. #
  105. # You should have received a copy of the GNU General Public License
  106. # along with this program; if not, write to the Free Software
  107. # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
  108.  
  109. # Notes
  110. # Careful using ls * if there are directories (ls -d *)
  111.  
  112. #########################################################################
  113. # Settings section - see unpak.cfg.example
  114. #########################################################################
  115. # Settings are read from the file specified. If the file does not exist
  116. # it will be created from the unpak.cfg.example file.
  117. # If unpak_load_settings starts with '/' its location is absolute,
  118. # otherwise it is relative to the location of this script.
  119. unpak_load_settings=/mnt/HD_a2/newsbin/conf/unpak.cfg
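# Illustrative example (hypothetical value, not the default above): a relative setting such as
#   unpak_load_settings=conf/unpak.cfg
# would be resolved to "$SCRIPT_FOLDER/conf/unpak.cfg" by MERGE_UNPAK_SETTINGS below,
# whereas the absolute path above is used exactly as given.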
  120.  
  121. ########################################################################
  122. # SECTION: LOGGING FUNCTIONS
  123. ########################################################################
  124.  
  125. LOGSTREAM() {
  126.     label="$1" ; shift;
  127.     prefix="$1" ; shift
  128.     while IFS= read x ; do
  129.         if [ -n "$x" ] ; then
  130.             LOG "$label" "$prefix:$x"
  131.         fi
  132.     done
  133. }
  134. LOG() {
  135.     label="$1" ; shift;
  136.     if [ -n "$*" ] ; then
  137.         echo "[$label] $@"  >&2
  138.     fi
  139. }
  140.  
  141. INFO() { LOG INFO "$@" ; }
  142. WARNING() { LOG WARNING "$@" ; }
  143. ERROR() { LOG ERROR "$@" ; }
  144. DEBUG() { LOG DEBUG "$@"; }
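# Illustrative output of the wrappers above (written to stderr, later timestamped by TEE_LOGFILES):
#   INFO "Unrar OK : a.rar"          ->  [INFO] Unrar OK : a.rar
#   ls -l | LOGSTREAM DEBUG "dir"    ->  one "[DEBUG] dir:<line>" entry per non-empty line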
  145.  
  146. LOG_END() {
  147.     s=
  148.     if WAITING_FOR_PARS ; then s="Waiting for PARS" ; fi
  149.     if [ "$gUnrarState" = "OK" ] ; then
  150.         INFO " ====== Post-process Finished : $1 : $NZB_NICE_NAME : $s $(date '+%T') ======"
  151.     else
  152.         ERROR " ====== Post-process Finished : $1 : $NZB_NICE_NAME : $s $(date '+%T') ======"
  153.     fi
  154. }
  155.  
  156. ########################################################################
  157. # SECTION: CONFIG FUNCTIONS
  158. ########################################################################
  159.  
  160. #Get nzbget's settings. Note these are read directly from the config file
  161. #and not necessarily the ones that nzbget is using internally.
  162. LOAD_NZBGET_SETTINGS() {
  163.     #eg ParCheck will become nzbget_ParCheck
  164.     #Get all lines with / = / remove spaces around '=' , prefix with nzbget_ , replace x.y=z with x_y=z
  165.     NZBGET -p | grep ' = ' | sed 's/^/nzbget_/;s/ = /=/;s/\.\([^=]*\)=/_\1=/' | grep -v 'nzbget_server' > $gTmpFile.nzb_cfg
  166.     . $gTmpFile.nzb_cfg
  167.     rm $gTmpFile.nzb_cfg
  168.     set | grep '^nzbget_' | LOGSTREAM DEBUG "conf"
  169. }
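# Illustrative example of the transformation above (nzbget.conf lines assumed, not taken from a real config):
#   "ParCheck = yes"        ->  nzbget_ParCheck=yes
#   "Category1.Name = tv"   ->  nzbget_Category1_Name=tv
# so later checks can simply test e.g. [ "$nzbget_ParCheck" = "yes" ].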
  170.  
  171. SET_DEFAULT_SETTINGS() {
  172.  
  173.     # :r! grep '^unpak_' /mnt/popcorn/.nzbget/unpak.cfg  | grep -v subfolder
  174.     unpak_settings_version=1
  175.     unpak_nzbget_bin="/mnt/HD_a2/newsbin/bin/nzbget"
  176.     unpak_nzbget_conf="/mnt/HD_a2/newsbin/bin/nzbget.conf"
  177.     unpak_unrar_bin="/ffp/bin/unrar"
  178.     unpak_par2_bin="/ffp/bin/par2"
  179.     unpak_completed_dir="/mnt/HD_a2/newsbin/usenet/download" #Default location for completed downloads
  180.  
  181.     unpak_recent_age=6
  182.     unpak_recent_dir="/mnt/HD_a2/newsbin/usenet/New"    #Contains hard links to files < unpak_recent_age days old
  183.     unpak_recent_age2=13
  184.     unpak_recent_dir2="/mnt/HD_a2/newsbin/usenet/Recent" #Contains hard links to files < unpak_recent_age2 days old
  185.     unpak_delete_from_recent_deletes_original=0
  186.  
  187.     unpak_auto_categorisation_from_filenames=1
  188.     unpak_auto_categorisation_from_newsgroups=1
  189.     unpak_auto_categorisation_from_imdb=0
  190.     unpak_imdb_movie_format="{TITLE}{ [CERT]}{ [GENRE_LIST]}{ NZB}"
  191.     unpak_imdb_title_country_filter="UK,USA"
  192.     unpak_imdb_certificate_country_filter="UK,USA"
  193.  
  194.     unpak_recent_extensions=".avi,.iso,.img,.mkv"
  195.     unpak_auto_categorisation=1
  196.     unpak_category_default=Unsorted
  197.     unpak_episodes_in_same_folder=1
  198.     unpak_debug_mode=0
  199.     unpak_sanity_check_rar_files=1
  200.     unpak_rename_img_to_iso=1
  201.     unpak_check_for_new_versions=0
  202.     unpak_nmt_html_logging=0
  203.     unpak_nmt_alert_using_fake_files=1
  204.     unpak_nmt_fake_file_extensions=".avi"
  205.     unpak_delete_rar_files=1
  206.     unpak_max_nzbfile_age=30
  207.     unpak_nested_unrar_depth=3
  208.     unpak_disable_external_par_repair=0
  209.     unpak_external_par_repair_tidy_queue=1
  210.     unpak_pause_nzbget=0
  211.     unpak_pause_nzbget_during_unrar=0
  212.     unpak_maximum_par_repair_minutes=300
  213.     unpak_nmt_pin_flag="PIN:FOLDER"
  214.     unpak_nmt_pin_folder_scrample_windows_share=1
  215.     unpak_nmt_pin_root="/mnt/HD_a2/newsbin/usenet/Other"
  216.     unpak_nmt_pin="0"
  217.     unpak_episodes_folder_case=caps
  218.     unpak_rename_existing_episode_folders=0
  219. }
  220.  
  221. unpak_settings_version=1
  222. # Can't call logging yet.
  223. MERGE_UNPAK_SETTINGS() {
  224.  
  225.     case "$unpak_load_settings" in
  226.         /*) true;;
  227.         *) unpak_load_settings="$SCRIPT_FOLDER/$unpak_load_settings" ;;
  228.     esac
  229.  
  230.     INFO "MERGE_UNPAK_SETTINGS [$unpak_load_settings]"
  231.  
  232.     if [ -n "$unpak_load_settings" ] ; then
  233.         #If there is no sample cfg - create one
  234.         if [ ! -f "$unpak_load_settings" ] ; then
  235.             cp "$SCRIPT_FOLDER/unpak.cfg.example" "$unpak_load_settings"
  236.             echo "Create $unpak_load_settings file from example"
  237.         fi
  238.  
  239.         if [ -f "$unpak_load_settings" ] ; then
  240.             if egrep -q "^ *unpak_settings_version=('|)$unpak_settings_version($|[^0-9])" "$unpak_load_settings" ; then
  241.                 echo "Loading settings from $unpak_load_settings"
  242.                 . "$unpak_load_settings"
  243.             else
  244.                 echo "Settings in $unpak_load_settings ignored. Not compatible"
  245.             fi
  246.         else
  247.             echo "Using Default Settings"
  248.         fi
  249.     fi
  250. }
  251.  
  252. CHECK_SETTINGS() {
  253.     settings=0
  254.     LOAD_NZBGET_SETTINGS
  255.     if [ "$nzbget_ParCheck" = "yes" ] ; then
  256.         INFO "config: Mandatory parchecking already enabled in nzbget.conf"
  257.         external_par_check=0
  258.     else
  259.         if [ "$unpak_disable_external_par_repair" -eq 1 ] ; then
  260.             INFO "config: ALL parchecking/repairing is completely disabled."
  261.             external_par_check=0
  262.         else
  263.             if [ "$arg_par_check" -eq 0 ]; then
  264.                 INFO "config: Parchecking enabled in $SCRIPT_NAME"
  265.                 external_par_check=1
  266.             else
  267.                 ERROR "config: nzbget has Parchecked although this is disabled in nzbget.conf. May need to restart nzbget"
  268.                 external_par_check=0
  269.             fi
  270.         fi
  271.     fi
  272.     if [ "$external_par_check" -eq 1 ] ; then
  273. #        if [ "$unpak_delete_rar_files" -eq 0 ] ; then
  274. #           ERROR "config:unpak_delete_rar_files should be set if using external par repair feature"
  275. #           NMT_FAKEFILE ERR "CONFIG unpak_delete_rar_files must be set in unpak.sh"
  276. #           settings=1
  277. #       fi
  278.         if [ "$nzbget_LoadPars" != "all" ] ; then
  279.             if [ "$nzbget_AllowReProcess" != "yes" ] ; then
  280.                 WARNING "config: If LoadPars is not 'all' then AllowReProcess should be 'yes' in nzbget.conf"
  281.                 NMT_FAKEFILE WARN "CONFIG AllowReProcess should be yes in nzbget.conf"
  282.                 settings=1
  283.             fi
  284.         else
  285.             if [ "$nzbget_AllowReProcess" = "yes" ] ; then
  286.                 WARNING "config: If AllowReProcess is 'yes' then it's more efficient to set LoadPars=none in nzbget.conf"
  287.             fi
  288.         fi
  289.     fi
  290.     [ "$settings" -eq 0 ]
  291. }
  292.  
  293. #####################################################################
  294. # SECTION: PAR REPAIR
  295. #####################################################################
  296.  
  297. par_flag="unpak.need.pars";
  298. SET_WAITING_FOR_PARS() { touch "$par_flag" ; }
  299. CLEAR_WAITING_FOR_PARS() { rm -f "$par_flag" ; }
  300. WAITING_FOR_PARS() { [ -e "$par_flag" ] ; }
  301.  
  302. GET_PAUSED_IDS() {
  303.     # Look in the nzbget list for the given group.
  304.  
  305.     # search list using fgrep to avoid metacharacter issues '][.'
  306.     # However this may lead to substring matches (no anchoring), so surround the group name with
  307.     #asterisks first as these cannot appear inside a group name.
  308.     ids=$(NZBGET -L | sed 's/ / */;s,/,*/,' | fgrep "*$NZB_NICE_NAME*/" | sed -n '/[Pp][Aa][Rr]2 (.*paused)$/ s/^\[\([0-9]*\)\].*/\1/p')
  309.     echo $ids | sed 's/ /,/g'
  310. }
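# Illustrative walk-through (the queue listing format is assumed, not quoted from nzbget):
# for a line of the form "[12] My.Show.S01E03/my.show.vol00+01.par2 (5.20 MB, paused)"
# the first sed turns the group name into "*My.Show.S01E03*/", the fgrep on "*$NZB_NICE_NAME*/"
# keeps only this group, the second sed keeps paused par2 entries and prints "12",
# and several ids are finally joined as "12,13".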
  311. #Unpauses par files. Returns error if nothing to unpause.
  312. UNPAUSE_PARS_AND_REPROCESS() {
  313.     if [ "$nzbget_AllowReProcess" != "yes" ] ; then
  314.         ERROR "AllowReProcess disabled. Cannot repair"
  315.         NMT_FAKEFILE ERR "AllowReProcess disabled. Cannot repair"
  316.         return 1
  317.     fi
  318.     INFO "Downloading pars in $arg_nzb_file"
  319.     ids=$(GET_PAUSED_IDS)
  320.     if [ -n "$ids" ] ; then
  321.         NZBGET -E U $ids
  322.         NZBGET -E T $ids
  323.         SET_WAITING_FOR_PARS
  324.     else
  325.         return 1
  326.     fi
  327. }
  328. DELETE_PAUSED_PARS() {
  329.     if [ "$unpak_external_par_repair_tidy_queue" -eq 1 ] ; then
  330.         INFO "Deleting paused parts of $arg_nzb_file"
  331.         ids=$(GET_PAUSED_IDS)
  332.         if [ -n "$ids" ] ; then
  333.             NZBGET -E D $ids
  334.         fi
  335.     fi
  336. }
  337.  
  338. #Spent over an hour before realising permissions were not set properly on par2!
  339. #Make an executable copy so users don't need to telnet in
  340. NMT_FIX_PAR2_PERMISSIONS() {
  341.     if [ ! -x "$unpak_par2_bin" ] ; then
  342.         PAR2Alternative=/share/.nzbget/par2
  343.         if [ -x "$PAR2Alternative" ] ; then
  344.             unpak_par2_bin="$PAR2Alternative"
  345.         else
  346.             cp "$unpak_par2_bin" "$PAR2Alternative"
  347.             chmod o+x "$PAR2Alternative"
  348.             if [ ! -x "$PAR2Alternative" ] ; then
  349.                 ERROR "Make sure $unpak_par2_bin has execute permissions"
  350.                 NMT_FAKEFILE ERR "Change permissions on par2 executable"
  351.             else
  352.                 unpak_par2_bin="$PAR2Alternative"
  353.             fi
  354.         fi
  355.     fi
  356. }
  357.  
  358. #In case there are two or more par sets just look for .par2 files. (not PAR2 files)
  359. #TODO. We may need to know which Pars fix which rars in future so we can be more
  360. #selective with unraring when errors occur. But for now take an all or nothing approach.
  361. PAR_REPAIR_ALL() {
  362.     INFO "Start Par Repair"
  363.     NMT_FIX_PAR2_PERMISSIONS
  364.  
  365.     #First identify parsets for all FAILED or UNKNOWN rars.
  366.     if NO_RARS ; then
  367.         # Maybe mp3s etc. Just look at *.par2.
  368.         # TODO. Identify par sets correctly rather than just looking at *par2
  369.         for p in *.par2 ; do
  370.             if [ -f "$p" ] ; then
  371.                 PAR_REPAIR "$p" || true
  372.             fi
  373.         done
  374.     else
  375.         #Fix all broken rars only. These will only be top level rars.
  376.         LIST_RAR_STATES "(FAILED|UNKNOWN)" | while IFS= read rarPart ; do
  377.             #Find the first par file that looks like it may fix the rar file.
  378.             #TODO This may fail with accidental substring matches. But its quick and easy
  379.             #TODO optimize search all par2 first then PAR2
  380.             INFO "Finding PARS for $rarPart"
  381.             for p in *.par2 *.PAR2 ; do
  382.                 if [ -f "$p" ] && fgrep -l "$rarPart" "$p" > /dev/null ; then
  383.                     if PAR_REPAIR "$p" ; then
  384.                         SET_RAR_STATE "$rarPart" REPAIRED
  385.                     fi
  386.                     break
  387.                 fi
  388.             done
  389.         done
  390.     fi
  391. }
  392.  
  393. PAR_REPAIR() {
  394.     parFile="$1"
  395.  
  396.     INFO "Par Repair using $parFile"
  397.  
  398.     if [ "$pause_nzbget_during_par2repair" -eq 1 ] ; then
  399.         PAUSE_NZBGET
  400.     fi
  401.  
  402.     set +e
  403.     out=$gTmpFile.p2_out
  404.     err=$gTmpFile.p2_err
  405.     PAR_MONITOR "$out" "$$" &
  406.     "$unpak_par2_bin" repair "$parFile" > "$out" 2>"$err"
  407.     par_state=$?
  408.     set -e
  409.  
  410.     if [ "$pause_nzbget_during_par2repair" -eq 1 ] ; then
  411.         UNPAUSE_NZBGET
  412.     fi
  413.  
  414.     if [ $par_state -eq 0 ] ; then
  415.  
  416.         INFO "Repair OK : $parFile"
  417.  
  418.     else
  419.  
  420.         ERROR "Repair FAILED : $parFile"
  421.         NMT_FAKEFILE ERR PAR Repair failed
  422.         awk '!/\r/' "$out" | LOGSTREAM ERROR "par2out"
  423.         LOGSTREAM ERROR "par2err" < "$err"
  424.  
  425.     fi
  426.     rm -f "$err" "$out"
  427.     return $par_state
  428. }
  429.  
  430. # Return the current par2 action and file name (eg 'Scanning: "filename"')
  431. PAR_OUTPUT_GET_CURRENT_ACTION() {
  432.     # [Scanning: "filename": nn.n%] -> [Scanning: "filename"]
  433.     sed -n '/^[A-Z][a-z]*:/ s/": [^"]*$//p'
  434. }
  435. PAR_OUTPUT_GET_CURRENT_PERCENTAGE() {
  436.     # [Repairing: "filename": nn.n%] -> [nnn] (ie 000 to 1000 )
  437.     sed -n '/^Repairing:/ s/^.*": //;s/[^0-9]*//gp'
  438. }
  439.  
  440. #Get the last line from Par output. Each line may have many <CR>.
  441. #we need the text between the last two <CR>s on the last line.
  442. PAR_OUTPUT_GET_LAST_LINE() {
  443.     outfile=$1
  444.     awk 'END {
  445.            gsub(/\r$/,"") ;        #remove last CR
  446.            gsub(/.*\r/,"") ;       #Strip everything before the last CR
  447.            print;
  448.        }' "$outfile"
  449. }
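# Illustrative example: if the last line of the capture file is
#   Repairing: "a.rar": 10.0%<CR>Repairing: "a.rar": 55.0%<CR>
# the function returns 'Repairing: "a.rar": 55.0%' (the text between the last two carriage returns).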
  450. PAR_MONITOR() {
  451.     outfile=$1
  452.     pid=$2
  453.     percent_old=
  454.     scanning_old=""
  455.     loggedParStats=0
  456.     gap=0
  457.     eta=0
  458.     initial_poll=10
  459.     scan_poll=10
  460.     short_repair_poll=20 #seconds
  461.     long_repair_poll=600 #seconds
  462.     poll_time=$initial_poll
  463.     bad_eta_count=0
  464.     DEBUG "PAR_MONITOR"
  465.     touch "$outfile"
  466.     while true ; do
  467.         sleep $poll_time
  468.         if [ ! -f "$outfile" ] ; then break ; fi
  469.         if [ ! -d "/proc/$pid" ] ; then break ; fi # Parent process gone?
  470.         line=$(PAR_OUTPUT_GET_LAST_LINE "$outfile")
  471.         case "$line" in
  472.             Repairing:*)
  473.             #Get percentage nn.m% and convert to nnm
  474.             percent_new=$(echo "$line" | PAR_OUTPUT_GET_CURRENT_PERCENTAGE)
  475.             if [ -n "$percent_new" ] ; then
  476.                 gap=$(( $gap + $poll_time ))
  477.                 DEBUG "$percent_old - $percent_new after $gap secs"
  478.                 if [ -n "$percent_old" -a "$percent_old" -ne $percent_new ] ; then
  479.  
  480.                     if [ $loggedParStats -eq 0 ]; then
  481.                         loggedParStats=1
  482.                         awk '!/\r/' "$outfile" | LOGSTREAM DEBUG "par2out"
  483.                     fi
  484.  
  485.                     eta=$(( (1000-$percent_new)*$gap/($percent_new-$percent_old) ))
  486.  
  487.                     if [ $eta -lt 60 ] ; then
  488.                         eta_text="${eta}s"
  489.                     else
  490.                         eta_text="$(( $eta/60 ))m $(( $eta % 60 ))s"
  491.                     fi
  492.  
  493.                     msg="Par repair will complete in approx. $eta_text"
  494.                     if [ $unpak_maximum_par_repair_minutes -gt 0 -a  $eta -gt $(( $unpak_maximum_par_repair_minutes * 60 )) ] ; then
  495.                         msg="$msg ( limit is ${unpak_maximum_par_repair_minutes}m )"
  496.                         if [ $bad_eta_count -le 1 ] ; then
  497.                             WARNING "$msg"
  498.                             bad_eta_count=$(( $bad_eta_count + 1 ))
  499.                         else
  500.                             ERROR "$msg"
  501.                             p2pid=$(PAR2_GETPID)
  502.                             if [ -n "$p2pid" ] ; then
  503.                                 NMT_FAKEFILE "ERR PAR Aborted too long"
  504.                                 kill $p2pid
  505.                                 break
  506.                             fi
  507.                         fi
  508.  
  509.                     else
  510.                         INFO "$msg"
  511.                     fi
  512.  
  513.  
  514.                     CLEAR_FAKEFILES repair
  515.                     NMT_FAKEFILE "INFO PAR repair $eta_text left"
  516.                     gap=0
  517.                 fi
  518.                 percent_old=$percent_new
  519.             fi
  520.             #Once we have got an eta  , adjust the reporting interval
  521.             # if par2repair looks like it is going to be a while
  522.             poll_time=$(( $eta / 20 ))
  523.             if [ $poll_time -lt $short_repair_poll ] ; then poll_time=$short_repair_poll ; fi
  524.             if [ $poll_time -gt $long_repair_poll ] ; then poll_time=$long_repair_poll ; fi
  525.  
  526.             ;;
  527.         *)  # Show General Par action. Some lines will be skipped due to polling
  528.             par_action_new=$(echo "$line" | PAR_OUTPUT_GET_CURRENT_ACTION)
  529.             if [ -n "$par_action_new" ] ; then
  530.                 poll_time=$scan_poll
  531.                 if [ "$par_action_new" != "$scanning_old" ] ; then
  532.                     INFO "PAR repair $par_action_new"
  533.                     CLEAR_FAKEFILES repair
  534.                     NMT_FAKEFILE "INFO PAR repair $par_action_new"
  535.                     scanning_old="$par_action_new"
  536.                 fi
  537.             fi
  538.         esac
  539.     done
  540.     CLEAR_FAKEFILES repair
  541. }
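# Worked example of the ETA estimate used above (illustrative figures):
#   progress moves from 12.0% to 18.0% (percent_old=120, percent_new=180) over gap=40s, so
#   eta = (1000-180)*40/(180-120) = 546s, logged as "9m 6s", and the next poll interval
#   becomes 546/20 = 27s (clamped between 20s and 600s).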
  542.  
  543. #If a par2 process will take too long we want to kill it.
  544. #We could use killall but this may kill other par processes.
  545. #Not sure how to find the 'process group' with limited environment.
  546. #One way to identify the correct one may be to look in /proc/*/
  547. #Works on Linux only
  548. PAR2_GETPID() {
  549.  
  550.     for pid in /proc/[0-9]* ; do
  551.         if [ "$pid/cwd" -ef "$PWD" -a "$pid/exe" -ef "$unpak_par2_bin" ] ; then
  552.             echo "$pid" | sed 's;/proc/;;'
  553.             break
  554.         fi
  555.     done
  556. }
  557.  
  558. #####################################################################
  559. # SECTION: UNRAR
  560. #####################################################################
  561. unrar_tmp_dir="unrar.tmp.dir"
  562. UNRAR_ALL() {
  563.     loop=1
  564.     INFO "Unrar all files"
  565.     CLEAR_FAKEFILES UNRAR
  566.     if [ "$unpak_pause_nzbget_during_unrar" -eq 1 ] ; then
  567.         PAUSE_NZBGET
  568.     fi
  569.     failed=0
  570.  
  571.  
  572.     while [ $failed -eq 0 -a $loop -le $unpak_nested_unrar_depth ] ; do
  573.         INFO "UNRAR-PASS $loop"
  574.         # Exclude rars matching part[0-9]*[02-9].rar or
  575.         # part[0-9]*[1-9][0-9]*1.rar (ie end in 1.rar but not 0*1.rar )
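        # Illustrative effect of this filter: foo.rar, foo.part01.rar and foo.part001.rar are
        # kept as starting volumes, while continuation volumes such as foo.part02.rar,
        # foo.part10.rar and foo.part011.rar are excluded.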
  576.         if find . -name \*.rar  2>/dev/null | sed 's;^\./;;' |\
  577.            grep -v '[._]part[0-9]*\([1-9][0-9]*1\|[02-9]\)\.rar$' > "$gTmpFile.unrar" ; then
  578.             while IFS= read rarfile ; do
  579.                 if ! UNRAR_ONE "$rarfile" ; then
  580.                     gUnrarState="Failed"
  581.                     if [ "$gPass" -eq 1 ] ; then
  582.                         #no point in trying any more until we get all pars.
  583.                         failed=1
  584.                         break
  585.                     fi
  586.                 fi
  587.             done < "$gTmpFile.unrar"
  588.         fi
  589.         rm -f "$gTmpFile.unrar"
  590.  
  591.         loop=$(($loop+1))
  592.     done
  593.     DEBUG "Done STEPS"
  594.     # Unpause NZBGet
  595.     if [ "$unpak_pause_nzbget_during_unrar" -eq 1 ] ; then
  596.         UNPAUSE_NZBGET
  597.     fi
  598.  
  599.     if ! CHECK_UNRAR_STATE ; then
  600.         gUnrarState="Failed"
  601.     fi
  602.  
  603.     if [ "$gUnrarState" = "OK" ] ; then
  604.         TIDY_RAR_FILES
  605.         TIDY_NONRAR_FILES
  606.         return 0
  607.     else
  608.         ERROR "UNRAR_ALL FAILED"
  609.         return 1
  610.     fi
  611. }
  612.  
  613. #If some top level rars are untouched then there are also missing start volumes
  614. CHECK_UNRAR_STATE() {
  615.     if [ -f "$rar_state_list" ] ; then
  616.         if egrep '(FAILED|UNKNOWN)' "$rar_state_list" > $gTmpFile.state  ;  then
  617.             LOGSTREAM ERROR "finalstate"  < $gTmpFile.state
  618.             rm -f $gTmpFile.state
  619.             return 1
  620.         fi
  621.     fi
  622.     rm -f $gTmpFile.state
  623.     return 0
  624. }
  625.  
  626. #This will do a quick sanity test for missing rar parts.
  627. #It checks the number of expected parts, the file sizes and the rar volume headers.
  628. #The main advantage of doing this check is when no par files are present. This will
  629. #check for missing volume files, and also if a rar is corrupted prior to being uploaded,
  630. #then it may catch some simple header errors.
  631. #Note if nzbget is in direct write mode then the file space is pre-allocated and the
  632. # file sizes will be correct regardless of content.
  633. RAR_SANITY_CHECK() {
  634.  
  635.     rarfile="$1"
  636.     result=0
  637.     size=$(ls -l "$rarfile" | awk '{print $5}')
  638.     INFO "Checking : $rarfile"
  639.     DEBUG RAR CHECK BEGIN $(date)
  640.     wrong_size_count=0
  641.     bname=$(RARNAME "$rarfile")
  642.     num_actual_parts=$(ls -d "$bname"* | RARNAME_FILTER_WITH_PREFIX "$bname" | LINE_COUNT)
  643.     case "$rarfile" in
  644.         *[._]part*.rar)
  645.             last_part=$(ls "$bname"[._]part*.rar | grep '[._]part[0-9]*\.rar$'| LAST_LINE)
  646.             num_expected_parts=$(echo "$last_part" | sed 's/.*[._]part0*\([0-9]*\)\.rar$/\1/')
  647.             if [ "$nzbget_DirectWrite" != "yes" ] ; then
  648.                 wrong_size_count=$(ls -l "$bname"[._]part*.rar | WRONG_SIZE_COUNT $size )
  649.             fi
  650.             if ! ( ls "$bname"[._]part*.rar | CHECK_PARTS ) ; then
  651.                 result=1
  652.             fi
  653.  
  654.             ;;
  655.         *.rar)
  656.             #num_actual_parts=$(ls "$bname.rar" "$bname".r[0-9][0-9] 2>/dev/null | LINE_COUNT)
  657.             if [ $num_actual_parts -eq 1 ] ; then
  658.                 last_part="$rarfile"
  659.                 num_expected_parts=1
  660.                 wrong_size_count=0
  661.             else
  662.                 last_part=$(ls "$bname".r[0-9][0-9] 2>/dev/null | LAST_LINE)
  663.                 num_expected_parts=$(($(echo "$last_part" | sed 's/.*\.r0*\([0-9][0-9]*\)/\1/')+2))
  664.                 if [ "$nzbget_DirectWrite" != "yes" ] ; then
  665.                     wrong_size_count=$(ls -l "$bname".r[0-9][0-9] 2>/dev/null | WRONG_SIZE_COUNT  $size)
  666.                 fi
  667.             fi
  668.             if ! ( ls "$bname.rar" "$bname".r[0-9][0-9] 2>/dev/null | CHECK_PARTS ) ; then
  669.                 result=1
  670.             fi
  671.  
  672.             ;;
  673.         *)
  674.             WARNING unknown file $rarfile
  675.             return 1
  676.             ;;
  677.         esac
  678.  
  679.         DEBUG RAR CHECK END $(date)
  680.         DEBUG RAR CHECK num_actual_parts $num_actual_parts num_expected_parts $num_expected_parts wrong_size_count $wrong_size_count
  681.  
  682.         if [ "$num_expected_parts" != "$num_actual_parts" ] ; then
  683.             ERROR Missing parts for $rarfile
  684.             NMT_FAKEFILE ERR UNRAR Missing parts for $rarfile
  685.             result=1
  686.         fi
  687.         if ! CHECK_LAST_RAR_PART "$last_part"  ; then
  688.             ERROR End parts missing for $rarfile
  689.             NMT_FAKEFILE ERR UNRAR End parts missing for $rarfile
  690.             result=1
  691.         fi
  692.         if [ "$wrong_size_count" -ne 0 ] ; then
  693.             ERROR Unexpected size for parts of $rarfile
  694.             NMT_FAKEFILE ERR UNRAR Unexpected size for parts of $rarfile
  695.             result=1
  696.         fi
  697.  
  698.         if [ $(( $size * $num_actual_parts / 1024 )) -ge `FREE_KB "."` ] ; then
  699.             ERROR "Low Disk space $(FREE_KB .) KB Remaining"
  700.             NMT_FAKEFILE ERR UNRAR Low Disk Space
  701.             result=1
  702.         fi
  703.  
  704.         return $result
  705. }
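# Worked example (illustrative): for a set x.rar, x.r00 .. x.r43 the last part is x.r43,
# so num_expected_parts = 43 + 2 = 45 (the .rar plus .r00..r43); for y.part001.rar .. y.part045.rar
# the last part gives num_expected_parts = 45 directly. Either count is then compared with the
# volumes actually present and with their sizes.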
  706.  
  707. # Do a quick header check on each part.
  708. CHECK_PARTS() {
  709.  
  710.         header=0
  711.         while IFS= read part ; do
  712.             if ! CHECK_HEADER "$part" ; then
  713.                 header=1
  714.                 break
  715.             fi
  716.         done
  717.         if [ $header -eq 1 ] ; then
  718.             NMT_FAKEFILE ERR "UNRAR Header errors"
  719.         fi
  720.         [ $header -eq 0  ]
  721. }
  722.  
  723. # $1 = rar file
  724. CHECK_HEADER() {
  725.     header=0 ; INFO Checking header for "$1"
  726.     if  "$unpak_unrar_bin" lb "$1" > $gTmpFile.rar_hdr ; then
  727.         if [ ! -s $gTmpFile.rar_hdr ] ; then
  728.             ERROR Archive Error for "$1"
  729.             header=1
  730.         fi
  731.     else
  732.         ERROR Archive Error for "$1"
  733.         header=1
  734.     fi
  735.     rm -f $gTmpFile.rar_hdr ; [ $header -eq 0 ]  #return a failure status so CHECK_PARTS can stop early
  736. }
  737.  
  738.  
  739. #Takes ls -l of rar parts as input and returns number of parts with unexpected size.
  740. WRONG_SIZE_COUNT() {
  741.     size=$1
  742.     ALL_BUT_LAST_LINE | awk '$5 != '$size' {print $5}' | LINE_COUNT
  743. }
  744.  
  745. #If the last volume is missing then 'num_expected_parts' will be wrong, so list the
  746. #contents of the last part present and check that it does not continue into a further volume ('-->' or '<->')
  747. CHECK_LAST_RAR_PART() {
  748.     count=$("$unpak_unrar_bin" vl "$1" | LINE_COUNT)
  749.     code=$("$unpak_unrar_bin" vl "$1" | awk 'NR == '$count'-3 { print $3 }')
  750.     [ "$code" != "-->" -a "$code" != "<->" ]
  751. }
  752.  
  753. UNRAR_ONE() {
  754.    
  755.     tmplog="unpak.rar.out.$$"
  756.     tmperrlog="unpak.rar.err.$$"
  757.     rarfile="$1"
  758.     if [ -e "$rarfile" ] ; then
  759.         #We only change the state of rar's whose state is already set.
  760.         #These will be top level rars only. Nested rar's do not exist when the
  761.         #state list is being populated.
  762.         #This ensures that the par-repair stage is only called if  a top-level unrar fails.
  763.         state=$(GET_RAR_STATE "$rarfile")
  764.  
  765.         DEBUG "RARFILE $rarfile STATE = $state global state = $gUnrarState"
  766.         if [ "$state" = "UNKNOWN" -o "$state" = "REPAIRED" -o "$state" = "" ] ; then
  767.             #Perform additional checks if nzbget did not do any parchecking.
  768.             if [ "$arg_par_check" -eq 0 ] ; then
  769.                 if [ $unpak_sanity_check_rar_files -eq 1 ] ; then
  770.                     if ! RAR_SANITY_CHECK "$rarfile" ; then
  771.                         # Only set top level RARs as failed. (by using CHANGE_RAR_STATE not SET_RAR_STATE)
  772.                         CHANGE_RAR_STATE "$rarfile" "FAILED"
  773.                         return 1
  774.                     fi
  775.                 fi
  776.             fi
  777.             INFO "Extracting : $1"
  778.             set +e
  779.             d=$(DIRNAME "$rarfile")
  780.             r=$(BASENAME "$rarfile" "")
  781.  
  782.             #To avoid overlap issues every rar must unpack to a different local folder.
  783.             #At the very end of ALL processing we can move all information up into the root folder.
  784.             #
  785.             # This complexity is needed if for example we have a.rar and a.sub.rar(with a.rar(2) inside).
  786.             #
  787.             # if a.sub.rar succeeds it produces a.rar(2)
  788.             # if a.rar(1) then fails we cannot copy up a.rar(2) yet. We have to keep it down until a.rar(1) is repaired.
  789.             # This means the list of rar states may need to be updated to list rars in nested folders!
  790.  
  791.             (mkdir -p "$d/$unrar_tmp_dir" && cd "$d/$unrar_tmp_dir" && "$unpak_unrar_bin" x -y -p- "../$r" 2>"../$tmperrlog" |\
  792.                 TEE "../$tmplog" |\
  793.                 LOGSTREAM INFO "unrar"
  794.             )
  795.             set -e
  796.             if grep -q '^All OK' "$d/$tmplog" ; then
  797.                 INFO "Unrar OK : $rarfile"
  798.                 SET_RAR_STATE "$rarfile" "OK"
  799.                 ls -l "$d/$unrar_tmp_dir" | LOGSTREAM DEBUG "rarcontents"
  800.                 #Extract all lines with filenames from unrar log and add to delete queue
  801.                 sed -n "s#^Extracting from ../\(.*\)#$d/\1#p" "$d/$tmplog" >> "$delete_queue"
  802.                 rarState=0
  803.             else
  804.                 ERROR "Unrar FAILED : $rarfile"
  805.                 # Only set top level RARs as failed. (by using CHANGE_RAR_STATE not SET_RAR_STATE)
  806.                 CHANGE_RAR_STATE "$rarfile" "FAILED"
  807.                 LOGSTREAM ERROR "unrar-err" < "$d/$tmperrlog"
  808.                 NMT_FAKEFILE ERR "UNRAR from $rarfile failed"
  809.                 rarState=1
  810.             fi
  811.             rm -f "$d/$tmplog" "$d/$tmperrlog"
  812.             return $rarState
  813.         fi
  814.     fi
  815. }
  816.  
  817. ###############################################################################
  818. # SECTION: UTILS
  819. ###############################################################################
  820. FREE_KB() {
  821.     free_space=$(df -k "$1" | awk 'NR==2 {print $4}')
  822.     INFO "Freespace [$1] = $free_space"
  823.     echo "$free_space"
  824. }
  825.  
  826. #Get last line of stdin 'tail -1'
  827. LAST_LINE() {
  828.     awk 'END { print }'
  829. }
  830.  
  831. #wc -l
  832. LINE_COUNT() {
  833.     awk 'END { print NR }'
  834. }
  835.  
  836. ALL_BUT_LAST_LINE() {
  837.     sed 'x;1 d'
  838. }
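# Illustrative usage of the helpers above (GNU equivalents shown for reference only):
#   ls *.rar | LAST_LINE              # behaves like: tail -n 1
#   ls *.rar | LINE_COUNT             # behaves like: wc -l
#   ls -l *.rar | ALL_BUT_LAST_LINE   # behaves like: sed '$d'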
  839.  
  840. NZBGET() {
  841.    DEBUG "nzbget $@"
  842.    "$unpak_nzbget_bin" -c "$unpak_nzbget_conf" "$@"
  843. }
  844. PAUSE_NZBGET() { NZBGET -P; }
  845. UNPAUSE_NZBGET() { NZBGET -U; }
  846.  
  847. GET_NICE_NZBNAME() {
  848.     #The NZBFile is converted to a nice name which is used for the group name in the nzbget list,
  849.     # and also for the folder name (if AppendNzbDir is set)
  850.     #From NZBSource the conversion is (strchr("\\/:*?\"><'\n\r\t", *p) then trailing dots and spaces are removed.
  851.     #The following sed does the same - except for the whitespace
  852.     BASENAME "$arg_nzb_file" .nzb | sed "s#['"'"*?:><\/]#_#g;s/[ .]*$//'
  853. }
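# Illustrative example (assumed input): "My Show: 1x03 - What?.nzb" becomes
# "My Show_ 1x03 - What_" (quotes, *, ?, :, <, >, \ and / are replaced by '_',
# then trailing dots and spaces are stripped).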
  854.  
  855. quote_re="['"'"]'
  856. rar_re='[._](part[0-9][0-9]*\.rar|rar|r[0-9][0-9])$'
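# Illustrative suffixes matched by rar_re: ".rar", ".r00", ".part001.rar", "_part01.rar".
# RARNAME/FLAGID below strip this suffix, so "movie.part001.rar" and "movie.r00" both reduce to "movie".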
  857.  
  858. #Same as rarname but remove quotes.
  859. FLAGID() {
  860.     echo "$1" | sed -r "s/$rar_re//;s/$quote_re//g;"
  861. }
  862. #Note. Only top level rars that exist on the first pass have their state stored.
  863. #So we dont need to bother with nested paths.
  864. RARNAME() {
  865.     echo "$1" | sed -r "s/$rar_re//"
  866. }
  867.  
  868. # /a/b/c.rar -> /a/b/c
  869. # /a/b/c.jpg -> nothing
  870. RARNAME_FILTER() {
  871.     sed -rn "s/$rar_re//p"
  872. }
  873. # An additional string can be inserted to match the basename.
  874. RARNAME_FILTER_WITH_PREFIX() {
  875.     s=$(echo "$1" | RE_ESCAPE_FOR_SED)
  876.     sed -rn "s/^($s)$rar_re/\1/p"
  877. }
  878.  
  879. #Add '\' to regular expression metacharacters in a string.
  880. #Required so we can search for the string whilst using regular expressions.
  881. # eg grep "^$string$". this will fail if string contains '[].* etc.
  882. RE_ESCAPE_FOR_SED() {
  883.  
  884. # fix made for http://apps.sourceforge.net/phpbb/nzbget/viewtopic.php?f=3&t=46&sid=5409e168c0c3666fe8b80127d08d6542#p188
  885.  
  886. # old setting    sed 's/\([][.*/\]\)/\\\1/g'
  887.     sed 's/\([].[*/\]\)/\\\1/g'
  888. }
  889. # Same as RE_ESCAPE_FOR_SED but (|) are also meta characters.
  890. RE_ESCAPE_FOR_GREP() {
  891. # old setting    sed 's/\([][.*/\(|)]\)/\\\1/g'
  892.      sed 's/\([].[*/\(|)]\)/\\\1/g'
  893. }
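# Illustrative example: echo 'a[1].b*' | RE_ESCAPE_FOR_SED  prints  a\[1\]\.b\*
# so the result can be used safely on the left-hand side of a sed expression.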
  894.  
  895.  
  896. BASENAME() {
  897.     echo "$1" | sed "s|.*/||;s|$2\$||"
  898. }
  899. DIRNAME() {
  900.     echo "$1" | sed 's|^\([^/.]\)|./\1|;s|\(.\)/[^/]*$|\1|'
  901. }
  902. PRETTY_TEXT() {
  903.     #sed -r 's/[^A-Za-z0-9 /*]+/ /g' | CHANGE_CASE "$unpak_episodes_folder_case"
  904.     sed -r 's/[-:_.* ]+/ /g' | CHANGE_CASE "$unpak_episodes_folder_case"
  905. }
  906.  
  907. CHANGE_CASE() {
  908.     case "$1" in
  909.         *upper) CHANGE_CASE_AWK toupper ;;
  910.         *lower) CHANGE_CASE_AWK tolower;;
  911.         caps) CHANGE_CASE_AWK caps;;
  912.         *) cat;;
  913.     esac
  914. }
  915.  
  916. # Input - stdin $1=upper,lower,caps : output : stdout
  917. CHANGE_CASE_AWK() {
  918.  
  919.     awk '
  920.    function caps(str) {
  921.        if (match(str,/^[a-zA-Z]/)) {
  922.            return toupper(substr(str, 1, 1))tolower(substr(str, 2))
  923.        } else {
  924.            return substr(str,1,1)toupper(substr(str, 2, 1))tolower(substr(str, 3))
  925.        }
  926.    }
  927.  
  928.    {
  929.        gsub(/\//,"/ ");
  930.        for(i=1;i<=NF;i++){
  931.            #Change words that have alphabetic chars only
  932.            #if (match($i,/^[a-zA-Z]+$/)) {
  933.                $i='"$1"'($i)
  934.            #}
  935.        }
  936.        gsub(/\/ /,"/");
  937.        print
  938.    }'
  939. }
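# Illustrative examples:
#   echo "tv/my show name" | CHANGE_CASE caps    ->  Tv/My Show Name
#   echo "tv/my show name" | CHANGE_CASE upper   ->  TV/MY SHOW NAME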
  940. # Input $1=field sep + data from std in
  941. # Output = stdin on one line joined by field sep
  942. FLATTEN() {
  943.     awk '{printf "%s%s", (NR==1?"":"'"$1"'"), $0}'
  944. }
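# Illustrative example: printf "Action\nDrama\n" | FLATTEN "-"   outputs "Action-Drama"
# (used further down to turn the IMDB genre list into a single folder-name fragment).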
  945.  
  946. # Tee command - borrowed from http://www.gnu.org/manual/gawk/html_node/Tee-Program.html
  947. # 'Arnold Robbins, arnold@gnu.org, Public Domain 'and tweaked a bit.
  948. TEE() {
  949.     awk '
  950.      BEGIN {
  951.          append=(ARGV[1] == "-a")
  952.          for(i=append+1 ; i<ARGC;i++) {
  953.              copy[i]=ARGV[i]
  954.              if (append == 0) printf "" > copy[i];
  955.          }
  956.          ARGC=1; #Force stdin
  957.  
  958.      }
  959.      { print ;
  960.        for (i in copy) {
  961.            print >> copy[i];
  962.        }
  963.        system(""); # Flush all buffers
  964.        #fflush("");
  965.      }
  966.      END { for (i in copy) close(copy[i]) }
  967.      ' "$@"
  968. }
  969. #Special Tee command for nzbget logging. The main command pipes
  970. #its stdout and stderr to TEE_LOGFILES which then sends it to
  971. #1. stdout (to be captured by nzbget)
  972. #2. unpak.txt (local log file)
  973. #3. unpak.html (optional)
  974. TEE_LOGFILES() {
  975.     awk '
  976.      function timestamp() {
  977.        return strftime("%T",systime());
  978.     }
  979.      BEGIN {
  980.          debug='$unpak_debug_mode'
  981.          txt=ARGV[1];
  982.          html="";
  983.          if ( '$unpak_nmt_html_logging' == 1 ) {
  984.              html=ARGV[2];
  985.          }
  986.          ARGC=1; #Force stdin
  987.  
  988.      }
  989.      {
  990.        v=substr($0,2,4);
  991.        if ( debug==1 || v!="DEBU" ) {
  992.            sub(/\]/,"] unpak:" timestamp());
  993.            print ;
  994.            print >> txt;
  995.            c="blue";
  996.            if (html != "") {
  997.                if (v=="INFO") {
  998.                    c="green";
  999.                } else if (v == "DETA" ) {
  1000.                    c="blue";
  1001.                } else if (v == "DEBU" ) {
  1002.                    c="blue";
  1003.                } else if (v == "WARN" ) {
  1004.                    c="orange";
  1005.                } else if (v == "ERRO" ) {
  1006.                    c="red";
  1007.                }
  1008.                printf "<br><font color=\"%s\">%s</font>\n",c,$0 >> html;
  1009.            }
  1010.            system(""); # Flush all buffers
  1011.        }
  1012.      }
  1013.      END { close(txt); if (html != "" ) close(html); }
  1014.      ' "$@"
  1015. }
  1016.  
  1017. #Join files with the format *.nnnn.ext or *.ext.nnnn
  1018. JOINFILES() {
  1019.  
  1020.     ext="$1"
  1021.     extname=$(echo "$ext" | sed -r 's/\.[0-9]+//g') #remove digits from extension
  1022.     glob=$(echo "$ext" | sed 's/[0-9]/[0-9]/g')            # glob pattern
  1023.  
  1024.     for part in *$ext ; do
  1025.         DEBUG "join part $part"
  1026.         if [ -f "$part" ] ; then
  1027.             bname=$(echo "$part" | sed 's/\.[^.]*\.[^.]*$//') #remove last two extensions
  1028.             newname="$bname$extname"
  1029.             INFO "Joining $newname"
  1030.             if [ -f "$newname" ] ; then
  1031.                 WARNING "$newname already exists"
  1032.             else
  1033.                 if cat "$bname"$glob > "$newname" ; then
  1034.                     rm -f "$bname"$glob
  1035.                     #true
  1036.                 else
  1037.                     mv  "$newname" "damaged_$newname"
  1038.                 fi
  1039.             fi
  1040.         fi
  1041.     done
  1042. }
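# Illustrative example: JOINFILES ".avi.001" on a folder containing movie.avi.001 and movie.avi.002
# concatenates them (glob movie.avi.[0-9][0-9][0-9]) into movie.avi and removes the parts;
# JOINFILES ".0001.ts" joins movie.0001.ts, movie.0002.ts .. into movie.ts the same way.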
  1043.  
  1044. #is $1 a sub directory of $2 ?
  1045. IS_SUBDIR() {
  1046.     sub=$(cd "$1" ; pwd)
  1047.     while [ ! "$sub" -ef "/" ] ; do
  1048.         if [ "$2" -ef "$sub" ] ; then
  1049.             DEBUG "subdir [$1] [$2] = YES"
  1050.             return 0
  1051.         fi
  1052.         sub=$(cd "$sub/.." ; pwd )
  1053.         DEBUG "Subdir = [$sub]" ;
  1054.     done
  1055.     DEBUG "subdir [$1] [$2] = NO"
  1056.     return 1
  1057. }
  1058.  
  1059. TIDY_RAR_FILES() {
  1060.     DEBUG "TIDY_RAR_FILES with $gUnrarState"
  1061.     if [ "$gUnrarState" = "OK" ] ; then
  1062.         if [ "$arg_par_check" -eq 0 -a "$external_par_check" -eq 1 ] ; then
  1063.             DELETE_PAUSED_PARS
  1064.         fi
  1065.         DELETE_RAR_FILES
  1066.         MOVE_RAR_CONTENTS .
  1067.         CLEAR_ALL_RAR_STATES 0
  1068.  
  1069.  
  1070.     else
  1071.         #Easier to keep NZB Local
  1072.         cp "$arg_nzb_file.queued" .
  1073.     fi
  1074. }
  1075. TIDY_NONRAR_FILES() {
  1076.     DEBUG "TIDY_NONRAR_FILES"
  1077.     JOINFILES ".0001.ts"
  1078.     JOINFILES ".avi.001"
  1079.     JOINFILES ".avi.01"
  1080.     #rm -f *.nzb *.sfv *.1 _brokenlog.txt *.[pP][aA][rR]2 *.queued
  1081.     rm -f *.nzb *.sfv *.1 _brokenlog.txt *.[pP][aA][rR]2 *.queued
  1082.     if [ "$unpak_rename_img_to_iso" -eq 1 ] ; then
  1083.         ls *.img 2>/dev/null | EXEC_FILE_LIST "mv '\1' '\1.iso'" ""
  1084.     fi
  1085.     TIDY_NZB_FILES
  1086. }
  1087.  
  1088. #Rename nzb.queued to nzb$finished_nzb_ext then delete any old *$finished_nzb_ext files.
  1089. TIDY_NZB_FILES() {
  1090.     mv "$arg_nzb_file.queued" "$arg_nzb_file$finished_nzb_ext"
  1091.     if [ $unpak_max_nzbfile_age -gt 0 ] ; then
  1092.         #-exec switch doesn't seem to work
  1093.         d=$(DIRNAME "$arg_nzb_file")
  1094.         INFO Deleting NZBs older than $unpak_max_nzbfile_age days from $d
  1095.         find "$d" -name \*$finished_nzb_ext -mtime +$unpak_max_nzbfile_age > $gTmpFile.nzb
  1096.         LOGSTREAM INFO "old nzb" < $gTmpFile.nzb
  1097.         sed "s/^/rm '/;s/$/'/" $gTmpFile.nzb | sh
  1098.         rm -f $gTmpFile.nzb
  1099.     fi
  1100. }
  1101.  
  1102. #Notification of changes to unpak.sh.
  1103. VERSION_CHECK() {
  1104.     if [ "$unpak_check_for_new_versions" -eq 1 ] ; then
  1105.         latest=$(wget -O- http://www.prodynamic.co.uk/nzbget/unpak.version 2>/dev/null)
  1106.         if [ -n "$latest" -a "$latest" != "$VERSION" ] ; then
  1107.             INFO "Version $latest is available (current = $VERSION )"
  1108.             #NMT_FAKEFILE INFO "unpak version $latest is available"
  1109.         fi
  1110.     fi
  1111. }
  1112.  
  1113. #Create a fake media file whose name indicates some kind of error has occurred.
  1114. #This makes it easier to distinguish failed downloads from empty folders whose
  1115. #contents are still being processed.
  1116. NMT_FAKEFILE() {
  1117.     m=$(echo "$*" | sed 's;[/\];;g')
  1118.     if [ "$unpak_nmt_alert_using_fake_files" -eq 1 ] ; then
  1119.         for ext in $unpak_nmt_fake_file_extensions ; do
  1120.             touch "UnpacK._${m}_$ext"   #use the sanitised message, not "$*"
  1121.         done
  1122.     fi
  1123. }
  1124. CLEAR_FAKEFILES() {
  1125.     for ext in $unpak_nmt_fake_file_extensions ; do
  1126.         rm -f UnpacK._*"$1"*_$ext
  1127.     done
  1128. }
  1129. CLEAR_TMPFILES() {
  1130.     rm -f /tmp/unpak.[0-9]*.*
  1131. }
  1132.  
  1133. #Store the state of each rar file.
  1134. # This is simply in a flat file with format
  1135. # id*STATE
  1136. # where id is the id based on the basename of the rar file  and
  1137. # state is its current state.
  1138. #
  1139. # If a rar file has no state it was likely extracted from inside another rar file.
  1140. # as all of the initial states are set prior to extraction. This means that at least
  1141. # one volume of a rar file must be present for it to be correctly registered.
  1142. #
  1143. # STATE   | Next Action | Next States   | Comment
  1144. # none    | UNRAR       |   none        | this could be a rar created from another rar file
  1145. # UNKNOWN | UNRAR       | OK,FAILED     | this is a top-level rar identified from any one of its parts
  1146. # OK      | All Done    |     -         | Success. Keep the state to avoid re-visiting when nested unpacking.
  1147. # FAILED  | par fix.    |REPAIRED,FAILED| State will stay failed.
  1148. # REPAIRED| UNRAR       | OK,FAILED     |
  1149. #
  1150. rar_state_list="unpak.state.db"
  1151. rar_state_sep="*" #Some char unlikely to appear in filenames. but not quotes. E.g. * : / \
  1152. delete_queue="unpak.delete.sh"
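# Illustrative unpak.state.db contents (one "flagid*STATE" line per archive set; names are made up):
#   My.Movie.2008*OK
#   My.Movie.2008.subs*FAILED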
  1153.  
  1154. GET_RAR_STATE() {
  1155.     r=$(FLAGID "$1")
  1156.     [ ! -f $rar_state_list ] || awk "-F$rar_state_sep" '$1 == "'"$r"'" {print $2}' $rar_state_list
  1157. }
  1158. #Change if it already exists
  1159. CHANGE_RAR_STATE() {
  1160.     r=$(FLAGID "$1")
  1161.     s="$2"
  1162.     touch "$rar_state_list"
  1163.     awk "-F$rar_state_sep" '{ if ( $1=="'"$r"'" ) { print $1"'"$rar_state_sep$s"'" } else { print }}' $rar_state_list > $rar_state_list.1 &&\
  1164.     mv $rar_state_list.1 $rar_state_list
  1165. }
  1166. SET_RAR_STATE() {
  1167.     r=$(FLAGID "$1")
  1168.     s="$2"
  1169.     DEBUG "FLAGID [$1]=[$r]"
  1170.     touch "$rar_state_list"
  1171.     awk "-F$rar_state_sep" '{ if ( $1 != "'"$r"'" ) { print }} END { print "'"$r$rar_state_sep$s"'" } ' $rar_state_list > $rar_state_list.1 &&\
  1172.     mv $rar_state_list.1 $rar_state_list
  1173.     DEBUG "SET RARSTATE [$r]=[$s]"
  1174. }
  1175.  
  1176. LIST_RAR_STATES() {
  1177.     state_pattern="$1"
  1178.     touch "$rar_state_list"
  1179.     awk "-F$rar_state_sep" '{ if ( $2 ~ "'"$state_pattern"'" ) { print $1 }}' $rar_state_list
  1180. }
  1181.  
  1182. #The script is rar-driven (we may not have downloaded any pars yet and unrar before looking at pars)
  1183. #However, the initial rar file may be missing. So we need to look at all rar files present to
  1184. #know the state of rar files.
  1185. #The only situation we cant manage is where there are no rar parts at all. Unlikely.
  1186.  
  1187.  
  1188. INIT_ALL_RAR_STATES() {
  1189.     CLEAR_ALL_RAR_STATES 1
  1190.     lastPart=
  1191.  
  1192.     # Initialise the rar state file. This consist of each rar archive name
  1193.     # in the top level directory followed by '*UNKNOWN' (ie state is unknown)
  1194.     # There is one entry per multi-volume archive.
  1195.     # There are only entries if volumes are present at the start of processing.
  1196.     ls | awk '
  1197.    BEGIN {last_flag=""}
  1198.    {
  1199.    if (sub(/'"$rar_re"'/,"")) {
  1200.        gsub(/'"$quote_re"'/,"") #REMOVE quotes to get FLAGID
  1201.        flag=$0
  1202.        if (flag != last_flag) {
  1203.            print flag "'$rar_state_sep'UNKNOWN"
  1204.            last_flag = flag
  1205.        }
  1206.    }}' > "$rar_state_list"
  1207.    
  1208.     LOGSTREAM DEBUG "init" < "$rar_state_list"
  1209. }
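# Illustrative example: a folder containing x.part01.rar, x.part02.rar, y.rar and y.r00
# produces a state file with exactly two lines, "x*UNKNOWN" and "y*UNKNOWN".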
  1210.  
  1211. #We have to unpack each rar in its own folder to avoid clashes.
  1212. #This function should be called right at the end to push everything up
  1213. #to the main folder.
  1214.  
  1215. MOVE_RAR_CONTENTS() {
  1216.  
  1217.     #INFO "Move rar contents into $1 = $(pwd)"
  1218.     if [ -d "$unrar_tmp_dir" ]; then
  1219.         DEBUG "Moving rar contents up from [$PWD/$unrar_tmp_dir]"
  1220.         ( cd "$unrar_tmp_dir"; MOVE_RAR_CONTENTS "../$1" )
  1221.         #Copy directory up.
  1222.         #
  1223.         # could use mv $unrar_tmp_dir/* . but two problems.
  1224.         #
  1225.         # Hidden files and
  1226.         # mv with globbing will return an error if no files match.
  1227.         #But we dont really mind that, we only want an error if there was
  1228.         #a problem actually moving a real file.
  1229.         #
  1230.         ls -A "$unrar_tmp_dir" | EXEC_FILE_LIST "mv '$unrar_tmp_dir/\1' ." -e
  1231.         rmdir "$unrar_tmp_dir"
  1232.     fi
  1233. }
  1234.  
  1235.  
  1236. DELETE_RAR_FILES() {
  1237.     if [ -f "$delete_queue" ] ; then
  1238.         if [ $unpak_delete_rar_files -eq 1 ] ; then
  1239.             EXEC_FILE_LIST "rm '\1'" "-e" < "$delete_queue"
  1240.         else
  1241.             mv "$delete_queue" "$delete_queue.bak"
  1242.         fi
  1243.     fi
  1244.     rm -f "$delete_queue"
  1245. }
  1246.  
  1247. CLEAR_ALL_RAR_STATES() {
  1248.     force=$1
  1249.     if [ "$force" -eq 1 -o $unpak_debug_mode -eq 0 ] ; then
  1250.         rm -f "$rar_state_list"
  1251.     fi
  1252. }
  1253.  
  1254. LOG_ARGS() {
  1255.     cmd="'$0'"
  1256.     for i in "$@" ; do
  1257.         cmd="$cmd '$i' "
  1258.     done
  1259.     INFO "ARGS: $cmd"
  1260. }
  1261.  
  1262. #If the unpak_episodes_folder_case option is selected rename existing folders.
  1263. # This function needs fixing but it's only really for people that want to move all
  1264. #of their folders from one caps format to another.
  1265. #It doesn't do the movie folder.
  1266. RENAME_EXISTING_EPISODE_FOLDERS() {
  1267.     DEBUG "BEGIN Episode Folder Rename"
  1268.     if [ "$unpak_rename_existing_episode_folders" -eq 1 ] ; then
  1269.         case "$unpak_episodes_folder_case" in
  1270.             lower|upper|caps)
  1271.                 tv_root=$(DIRNAME "$(CREATE_CATEGORY_PATH 'Tv')")
  1272.                 tv_new_name=$(echo "tv" | CHANGE_CASE "$unpak_episodes_folder_case" )
  1273.                 for t in TV Tv tV tv ; do
  1274.                     RENAME_EXISTING_EPISODE_SUB_FOLDERS "$tv_root" "$t" "$tv_new_name"
  1275.                 done
  1276.             ;;
  1277.             *) ;;
  1278.         esac
  1279.     fi
  1280.     DEBUG "END Episode Folder Rename"
  1281. }
  1282.  
  1283. # $1=root $2=oldname $3=new name
  1284. RENAME_EXISTING_EPISODE_SUB_FOLDERS() {
  1285.     if [ -d "$1/$2" ] ; then
  1286.  
  1287.         ls -AF "$1/$2" | grep '/' | while IFS= read tv_path ; do
  1288.             #if [ -d "$1/$2/$tv_path" ] ; then
  1289.                 tv_path_new=$(echo "$tv_path" | CHANGE_CASE "$unpak_episodes_folder_case" )
  1290.                 RENAME_EXISTING_EPISODE_SUB_FOLDERS "$1/$2" "$tv_path" "$tv_path_new"
  1291.             #fi
  1292.         done
  1293.         #DEBUG "Renaming Episode Folder [$1/$2] -> [$1/$3]"
  1294.         MERGE_FOLDERS "$1/$2" "$1/$3" > /dev/null
  1295.         #DEBUG "DONE Renaming Episode Folder [$1/$2] -> [$1/$3]"
  1296.     fi
  1297. }
  1298.  
  1299. #Move command that merges non-empty directories.
  1300. #$1=source
  1301. #$2=dest
  1302. #stdout = list of moved files.
  1303. MERGE_FOLDERS() {
  1304.     if [ ! "$1" -ef "$2" ] ; then
  1305.         DEBUG "MERGE CONTENTS [$1]->[$2]"
  1306.         if [ ! -e "$2" ] ; then
  1307.             mkdir -p "$2"
  1308.         fi
  1309.         ls -A "$1" | while IFS= read f ; do
  1310.             if [ -d "$1/$f" ] ;then
  1311.                 if [ -e "$2/$f" ] ; then
  1312.                     MERGE_FOLDERS "$1/$f" "$2/$f"
  1313.                 else
  1314.                     DEBUG "MVD [$1/$f] [$2/.]"
  1315.                     mv "$1/$f" "$2/."
  1316.                 fi
  1317.             else
  1318.                 DEBUG "MVF [$1/$f] [$2/.]"
  1319.                 rm -f "$2/$f"
  1320.                 mv "$1/$f" "$2/."
  1321.                 echo "$2/$f" #output
  1322.             fi
  1323.         done
  1324.         rmdir "$1"
  1325.         DEBUG "END MERGE CONTENTS [$1]->[$2]"
  1326.     fi
  1327. }
  1328.  
  1329. #Stdout = new category
  1330. #If category ends in '*' then the folder contents are moved, otherwise
  1331. #the download folder itself is moved into the category folder.
  1332. AUTO_CATEGORY() {
  1333.  
  1334.     cat_from_files=$(AUTO_CATEGORY_FROM_FOLDER)
  1335.     DEBUG "autocategory cat_from_files=[$cat_from_files]"
  1336.     cat="$cat_from_files"
  1337.  
  1338.     cat_from_nzb=$(AUTO_CATEGORY_FROM_NEWSGROUPS_INSIDE_NZB)
  1339.     DEBUG "autocategory cat_from_nzb=[$cat_from_nzb]"
  1340.  
  1341.     # Try to get category from newsgroups embedded in the nzb
  1342.     if [ -z "$cat" ] ; then
  1343.         cat="$cat_from_nzb"
  1344.     else
  1345.         #Make sure PIN:FOLDER takes priority
  1346.         case "$cat_from_nzb" in
  1347.             $unpak_nmt_pin_flag*) cat="$cat_from_nzb" ;;
  1348.         esac
  1349.     fi
  1350.     DEBUG "autocategory check nfo cat=[$cat]"
  1351.  
  1352.     if [ -z "$cat" ] ; then
  1353.         cat=$(AUTO_CATEGORY_FROM_IMDB)
  1354.     fi
  1355.     DEBUG "autocategory: check pin cat=[$cat]"
  1356.  
  1357.     cat=$(NMT_SUBSTITUTE_PIN_FOLDER "$cat")
  1358.  
  1359.     #If the category is not a full path then make it pretty
  1360.     #but if it's empty make it the default category.
  1361.     case "$cat" in
  1362.         /*) true ;;
  1363.         ?*) cat=$(echo "$cat" | PRETTY_TEXT) ;;
  1364.         *) cat="$unpak_category_default" ;;
  1365.     esac
  1366.  
  1367.     if [ $unpak_episodes_in_same_folder -eq 1 ] ; then
  1368.         if echo "$cat" | grep -iq '^Tv/' ; then
  1369.             #Note the flatten marker on the end means copy individual files into the category, not the entire folder.
  1370.             cat="$cat$flatten"
  1371.         fi
  1372.     fi
  1373.     DEBUG "autocategory final: cat=[$cat]"
  1374.     echo "$cat"
  1375. }
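# Hedged example of what AUTO_CATEGORY may emit (names hypothetical): a TV match can
# become "Tv/Some Show/Series 1$flatten" (the =@= marker tells RELOCATE to move the
# files rather than the whole download folder); an absolute path, eg from a PIN
# category, is passed through untouched.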
  1376.  
  1377. AUTO_CATEGORY_FROM_IMDB() {
  1378.     if [ "$unpak_auto_categorisation_from_imdb" -ne 1 ] ; then return 0 ; fi
  1379.     imdb_url=`IMDB_EXTRACT_LINK_FROM_NFO`
  1380.     if [ -n "$imdb_url" ] ; then
  1381.         DEBUG "IMDB URL [$imdb_url]"
  1382.         imdb_page="$gTmpFile.imdb"
  1383.         wget -O "$imdb_page" "$imdb_url"
  1384.         title=$(IMDB_EXTRACT_TITLE "$imdb_url" "$imdb_page")
  1385.         DEBUG "IMDB title = [$title]"
  1386.         if IMDB_ISMOVIE "$imdb_page" ; then
  1387.             original_folder_name=`BASENAME "$arg_download_dir" "" | PRETTY_TEXT`
  1388.  
  1389.             folder="$unpak_imdb_movie_format"
  1390.  
  1391.             cert=$(IMDB_GET_CERTIFICATES "$imdb_page" )
  1392.             REPLACE_TOKEN folder CERT "$cert"
  1393.  
  1394.             DEBUG "folder after cert = [$folder]"
  1395.  
  1396.             REPLACE_TOKEN folder TITLE "$title"
  1397.             DEBUG "folder after title = [$folder]"
  1398.  
  1399.             genre=$(IMDB_GET_GENRES < "$imdb_page" | FLATTEN "-" )
  1400.             REPLACE_TOKEN folder GENRE_LIST "$genre"
  1401.             DEBUG "folder after genre = [$folder]"
  1402.  
  1403.             REPLACE_TOKEN folder NZB "$original_folder_name"
  1404.             DEBUG "folder after nzb = [$folder]"
  1405.  
  1406.             if [ -z "$folder" ] ; then
  1407.                 folder="$original_folder_name"
  1408.             fi
  1409.  
  1410.             echo "Movies/$folder$flatten"
  1411.  
  1412.         else
  1413.             echo "Tv/$title"
  1414.         fi
  1415.         rm -f "$imdb_page"
  1416.     fi
  1417. }
  1418.  
  1419. # Replace {..TOKEN..} with the token value.
  1420. # Characters between the braces and the token are only kept if the token value is not empty.
  1421. REPLACE_TOKEN() {
  1422.     name="$1"
  1423.     eval "old=\$$1"
  1424.     token="$2"
  1425.     new="$3"
  1426.     DEBUG "BEGIN TOKEN name=[$name] old=[$old] token=[$token] new=[$new]"
  1427.     eval "DEBUG $name=[\$$name]"
  1428.     case "$old" in
  1429.         *$token*)
  1430.         if [ -n "$new" ] ; then
  1431.                 new=$(echo "$old" | sed -r "s/\{([^{]*)$token([^}]*)\}/\1$new\2/g")
  1432.         else
  1433.                 new=$(echo "$old" | sed -r "s/\{([^{]*)$token([^}]*)\}//g")
  1434.         fi
  1435.         eval "$name=\$new"
  1436.         ;;
  1437.     esac
  1438.     DEBUG "TOKEN name=[$name] old=[$old] token=[$token] new=[$new]"
  1439.     eval "DEBUG $name=[\$$name]"
  1440. }
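# Minimal REPLACE_TOKEN sketch, assuming a hypothetical format string:
#   folder="{TITLE} {(CERT)}"
#   REPLACE_TOKEN folder TITLE "Leon"     # folder becomes "Leon {(CERT)}"
#   REPLACE_TOKEN folder CERT  "UK-15"    # folder becomes "Leon (UK-15)"
# An empty value would instead drop the whole {...} group, brackets included.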
  1441. APPEND() {
  1442.     # concatenate with optional space. ie. var=$var${var:+ }$text
  1443.     DEBUG "APPEND [$1][$2]"
  1444.     eval $1="\$$1\${$1:+ }'$2'"
  1445. }
  1446.  
  1447. AUTO_CATEGORY_FROM_FOLDER() {
  1448.     # Try to extract series.
  1449.     # Used to match series and episode where there is an 'S' or 'E' designator
  1450.     # Will look in 3 places:
  1451.     # 1. Inside *.nfo (Often the best file name is in here)
  1452.     # 2. Name of the folder
  1453.     # 3. Names of the media files
  1454.     #
  1455.     # It looks mainly for TV program formats, in order of preference:
  1456.     # s01e01 / s2d2
  1457.     # 1x1
  1458.     # 101
  1459.  
  1460.     if [ "$unpak_auto_categorisation_from_filenames" -ne 1 ] ; then return 0 ; fi
  1461.  
  1462.     cat=
  1463.  
  1464.     DEBUG "check 1 cat=[$cat]"
  1465.     cat=`AUTO_CATEGORY_FROM_FILENAMES_SERIES "S01E01"`
  1466.  
  1467.     DEBUG "check 2 cat=[$cat]"
  1468.     if [ -z "$cat" ] ; then
  1469.         cat=`AUTO_CATEGORY_FROM_FILENAMES_SERIES "1x01"`
  1470.     fi
  1471.     DEBUG "check 3 cat=[$cat]"
  1472.     if [ -z "$cat" ] ; then
  1473.         cat=`AUTO_CATEGORY_FROM_FILENAMES_SERIES "101" `
  1474.     fi
  1475.     DEBUG "check 4 cat=[$cat]"
  1476.  
  1477.     if [ -z "$cat" ] ; then
  1478.         if ls . * | egrep -iq '\.(mp3|flac|wma|wav|ogg)$' ; then cat="Music" ; fi
  1479.     fi
  1480.     DEBUG "check 5 cat=[$cat]"
  1481.  
  1482.     if [ -z "$cat" ] ; then
  1483.         if ls . * | egrep -iq '\.(exe)$' ; then cat="Software" ; fi
  1484.     fi
  1485.     DEBUG "check 6 cat=[$cat]"
  1486.     echo "$cat"
  1487. }
  1488.  
  1489. # Try to match a TV series type name. s01e01 s1d1 etc.
  1490. # $1 = match mode : S01E01 , 1x01 or 101
  1491. #      (the mode picks which regular expressions are built below to match
  1492. #       the name, the series/episode and the non-interesting characters to skip)
  1493. # output = Series Category or ''
  1494. AUTO_CATEGORY_FROM_FILENAMES_SERIES() {
  1495.  
  1496.     mode="$1"
  1497.  
  1498.     #Match 1 or 2 digit series or episode. Used when a prefix is given eg S05 or E3
  1499.     n="([0-9]{1,2})"
  1500.  
  1501.     #Used to match series when no prefix is present. eg 704 = S07e04.
  1502.     # or 1103 = series 11 episode 3.
  1503.     #Match 0n , n , or nn except 19 and 20 - as these are often century eg [20]08
  1504.     #if there is a s20e08 then hope it has prefix!
  1505.     #LastOfTheSummerWine is on Season 29!
  1506.     #Seasons up to 49 will match.
  1507.     nbare="([1-9]|0[1-9]|1[0-8]|2[1-9]|[34][0-9])"
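    # For illustration, nbare accepts 1, 05, 18, 21 or 49 but rejects 19 and 20.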
  1508.  
  1509.     #Match 2 digit episode - used when there is no prefix.
  1510.     n2="([0-9]{2})"
  1511.  
  1512.     c="0-9a-zA-Z"
  1513.     skip="[^$c]"
  1514.     keep="[$c]"
  1515.     noSpace="[^ ]"
  1516.  
  1517.     case "$mode" in
  1518.         S01E01)
  1519.         series="$skip+[sS]$n[dDeE]$n$skip"
  1520.         seriesNoSpace="$noSpace+[sS]$n[dDeE]$n$skip"
  1521.         ;;
  1522.  
  1523.         1x01)
  1524.         series="$skip+$n[xX]$n$skip"
  1525.         seriesNoSpace="$noSpace+$n[xX]$n$skip"
  1526.         ;;
  1527.  
  1528.         101)
  1529.         series="$skip+$nbare$n2$skip"
  1530.         seriesNoSpace="$noSpace+$nbare$n2$skip"
  1531.         ;;
  1532.         *) exit 1;;
  1533.     esac
  1534.  
  1535.     nameIncSpace="$skip*($keep[^:]*$keep)"
  1536.     nameNoSpace=".*($keep[^ :]*$keep)"
  1537.     nfo_pattern="$nameNoSpace$seriesNoSpace"
  1538.     nzb_pattern="$nameIncSpace$series"
  1539.     media_pattern="$nameIncSpace$series.*\.(avi|AVI|mkv|MKV|nfo|NFO)$"
  1540.  
  1541.     extract_from_nfo_name=
  1542.     if [ "$mode" != 101 ] ; then
  1543.         extract_from_nfo_name=$(cat *.nfo 2>/dev/null | sed -rn "/$nfo_pattern/ p")
  1544.     fi
  1545.     extract_from_nzb_name=$(echo "$NZB_NICE_NAME-" | sed -rn "/$nzb_pattern/ p")
  1546.     extract_from_media_name=$(ls | sed -rn "/$media_pattern/ p" | head -1)
  1547.     if [ -n "$extract_from_nfo_name" ] ; then
  1548.         nameAndSeries="$nameNoSpace$seriesNoSpace"
  1549.         cat="$extract_from_nfo_name"
  1550.         INFO "Getting category from NFO filename [$cat] using [$nameAndSeries]"
  1551.     else
  1552.         nameAndSeries="$nameIncSpace$series"
  1553.         if [ -n "$extract_from_nzb_name" ] ; then
  1554.             cat="$extract_from_nzb_name"
  1555.             INFO "Getting category from NZB filename [$cat] using [$nameAndSeries]"
  1556.         else
  1557.             if [ -n "$extract_from_media_name" ] ; then
  1558.                 cat="$extract_from_media_name"
  1559.                 INFO "Getting category from media filenames [$cat] using [$series]"
  1560.             fi
  1561.         fi
  1562.     fi
  1563.  
  1564.     if [ -n "$cat" ] ; then
  1565.         sub_reqid="s/.*$skip([Pp][Rr][Ee][Ss][Ee][Nn][Tt][Ss]$skip*|[Rr][Ee][Qq]$skip*|#)[0-9]{5}//"
  1566.         sub_reqtxt="s/.*$skip+[Rr][Ee][Qq]$skip//"
  1567.         sub_leading_zeros="s/([^1-9]|^)0+([1-9])/\1\2/g"
  1568.  
  1569.         #Convert 'blah REQ blah 12345 the.fonZ - s01e02' to 'Tv/the fonZ/Series 1'  
  1570.         # Series = \2
  1571.         # Episode = \3
  1572.  
  1573.         cat=$(echo "$cat" | sed -r  "$sub_reqid;$sub_reqtxt;s@$nameAndSeries.*@Tv/\1/Series \2@;$sub_leading_zeros")
  1574.  
  1575.        
  1576.     fi
  1577.     DEBUG "cat=[$cat] using series=[$series]"
  1578.     echo "$cat"
  1579. }
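# Rough worked example (names hypothetical): in mode "S01E01" an nzb called
# "Some.Show.S02E05.720p" matches nzb_pattern and the final sed turns it into
# roughly "Tv/Some.Show/Series 2" (leading zeros stripped; PRETTY_TEXT tidies the
# dots later in AUTO_CATEGORY).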
  1580. AUTO_CATEGORY_FROM_NEWSGROUPS_INSIDE_NZB() {
  1581.     if [ "$unpak_auto_categorisation_from_newsgroups" -ne 1 ] ; then return 0 ; fi
  1582.     #Get values of all subfolder_by_newsgroup_ variables.
  1583.     set | sed -n '/^unpak_subfolder_by_newsgroup_[0-9]/ s/^[^=]*=//p' | sed "s/^' *//g;s/ *: */=/;s/ *'$//g" |\
  1584.         while IFS== read keyword destination ; do
  1585.             DEBUG "Check category $keyword=$destination"
  1586.             if grep -ql "<group>.*$keyword.*</group>" "$arg_nzb_file$finished_nzb_ext" ; then
  1587.                 INFO "Getting category from newsgroup matching [$keyword]"
  1588.                 echo "$destination"
  1589.                 break
  1590.             fi
  1591.         done
  1592. }
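# Hedged example of a newsgroup mapping (setting value is hypothetical):
#   unpak_subfolder_by_newsgroup_1='alt.binaries.teevee : Tv'
# would send any nzb containing <group>alt.binaries.teevee</group> to "Tv".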
  1593.  
  1594. CREATE_CATEGORY_PATH() {
  1595.     case "$1" in
  1596.         /*) new_path="$1" ;;
  1597.         *) new_path="$unpak_completed_dir/$1" ;;
  1598.     esac
  1599.     CREATE_FOLDER_RELATIVE_TO_DESTDIR "$new_path"
  1600. }
  1601.  
  1602. #cat = final location*Category
  1603. #Also changes directory so must not be run from subprocess.
  1604. RELOCATE() {
  1605.     cat="$arg_category"
  1606.     move_folder=1
  1607.     DEBUG "1 move_folder=[$move_folder] cat=[$cat]"
  1608.     if [ -z "$cat" -a $unpak_auto_categorisation -eq 1 ] ; then
  1609.         cat=$(AUTO_CATEGORY)
  1610.         DEBUG "2a cat=[$cat]"
  1611.         case "$cat" in
  1612.            *$flatten) cat=`echo "$cat" | sed "s/$flatten\$//"`
  1613.                 DEBUG "2 move_folder=[$move_folder]"
  1614.                 move_folder=0
  1615.                ;;
  1616.        esac
  1617.     fi
  1618.     DEBUG "3 move_folder=[$move_folder] cat=[$cat]"
  1619.  
  1620.     RENAME_EXISTING_EPISODE_FOLDERS
  1621.  
  1622.     new_path=$(CREATE_CATEGORY_PATH "$cat")
  1623.  
  1624.     b=$(BASENAME "$arg_download_dir" "")
  1625.  
  1626.     if [ ! "$new_path/$b" -ef "$arg_download_dir" ] ; then
  1627.  
  1628.         INFO "Moving $arg_download_dir to $new_path/. [Category=$cat]"
  1629.  
  1630.         DEBUG "4 move_folder=[$move_folder]"
  1631.         if [ "$move_folder" -eq 1 ] ; then
  1632.             new_path=$( GET_NEW_FOLDER_NAME "$new_path" "$b" )
  1633.             DEBUG "new_path= [$new_path]"
  1634.             mkdir -p "$new_path"
  1635.         fi
  1636.  
  1637.         #If unpak_nmt_pin_folder_scrample_windows_share is set then add an asterisk to the
  1638.         #folder name, if it is going in the pin_root folder. This will force samba to
  1639.         #use an alternate name, so may help avoid inadvertent viewing.
  1640.         #Again this is no replacement for real security.
  1641.         if [ "$unpak_nmt_pin_folder_scrample_windows_share" -eq 1 ]; then
  1642.             if IS_SUBDIR "$new_path" "$unpak_nmt_pin_root" ; then
  1643.                 mv "$new_path" "$new_path:"
  1644.                 new_path="$new_path:"
  1645.             fi
  1646.         fi
  1647.  
  1648.         # The output is used later to update the 'RECENT' folders.
  1649.         MERGE_FOLDERS "$arg_download_dir" "$new_path" > $gTmpFile.moved_files
  1650.         cd "$new_path"
  1651.     fi
  1652.     INFO "Relocated cat=[$cat] cwd=[$PWD]"
  1653. }
  1654.  
  1655. #$1=root $2=folder
  1656. GET_NEW_FOLDER_NAME() {
  1657.     #Move entire folder but rename if there is a clash TODO : TEST
  1658.     if [ -d "$1/$2" ] ; then
  1659.         loop=1
  1660.         while [ -d "$1/$2.$loop" ] ; do
  1661.             loop=$(( $loop + 1 ))
  1662.         done
  1663.         echo "$1/$2.$loop"
  1664.     else
  1665.         echo "$1/$2"
  1666.     fi
  1667. }
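# For example (hypothetical paths): GET_NEW_FOLDER_NAME "/x/Tv" "Some Show" returns
# "/x/Tv/Some Show.1" if "/x/Tv/Some Show" already exists, then ".2" and so on,
# so an existing folder is never clobbered.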
  1668.  
  1669. #Input = $1=imdb page
  1670. #Output = Return status 0=tv 1=not tv
  1671. IMDB_ISTV() {
  1672.     grep -ql 'episodes#season-1' "$1"
  1673. }
  1674.  
  1675. #Input = $1=imdb page
  1676. #Output = Return status 0=movie 1=not movie
  1677. IMDB_ISMOVIE() {
  1678.     if IMDB_ISTV "$1" ; then
  1679.         INFO ISTV
  1680.         return 1
  1681.     else
  1682.         return 0
  1683.     fi
  1684. }
  1685.  
  1686. #Input  $1=main imdb url $2=html text of main url
  1687. #Output = Title of film according to $unpak_imdb_title_country_filter
  1688. IMDB_EXTRACT_TITLE() {
  1689.     if grep -q 'releaseinfo#akas' "$2" ; then
  1690.         #Get the title using the AKA page
  1691.         IMDB_EXTRACT_AKA_TITLE "$1" "$unpak_imdb_title_country_filter" | grep -v "^DBG:"
  1692.     else
  1693.         #Get the title from main page
  1694.         grep '<title>' "$2" | sed 's/.*<title>//;s/ (.*//;s/&[^;]*;//g'
  1695.     fi
  1696. }
  1697. #$1=main url
  1698. #$2=Countries
  1699. #   If the language is not the chosen one then get the first match from the AKA page; if none match use the title (doesn't work for Two Brothers)
  1700. #   OR get the first country; if it is not in the target list get the first match from the AKA page; if none match use the title (doesn't work for Leon)
  1701. IMDB_EXTRACT_AKA_TITLE() {
  1702.     url="$1/releaseinfo#akas"
  1703.     url=$( echo "$url" | sed -r 's,//r,/r,g' )
  1704.     DEBUG "url=[$url]"
  1705.     if wget -O "$gTmpFile.aka" "$url" ; then
  1706.         countries=$(CSV_TO_EGREP "$2" )
  1707.         awk '
  1708.  
  1709. function mycountry(c) {
  1710.    return match(c,/^'"$countries"'$/);
  1711. }
  1712.  
  1713.        BEGIN { country=""; }
  1714. /<title>/ {
  1715.        title=$0 ;
  1716.        sub(/[^>]+>/,"",title);
  1717.        sub(/ +\([0-9]+.*/,"",title);
  1718.        gsub(/&[^;]+;/,"",title);
  1719.        print "DBG: default", title;
  1720.    }
  1721. /\/Recent\// {
  1722.    if (country == "") {
  1723.        country = $0;
  1724.        sub(/.*\/Recent\//,"",country);
  1725.        sub(/".*/,"",country);
  1726.        if (mycountry(country)) {
  1727.            print "DBG: RELEASED", title, country;
  1728.            print title;
  1729.            force_exit=1
  1730.            exit 0;
  1731.        } else {
  1732.            #print "DBG: Country -> " country , title;
  1733.        }
  1734.    }
  1735. }
  1736. /name="akas"/,/<\/table>/ {
  1737.    if (index($0,"<td>") > 0) {
  1738.        #print "DBG: LINE ->" $0;
  1739.        gsub(/<td>/,"");
  1740.        gsub(/<\/td>/,"");
  1741.        gsub(/&#[0-9]+;/,"?");
  1742.        if (count % 2 == 0 ) {
  1743.            #Title
  1744.            title2=$0;
  1745.            print "DBG: Title ->" title2;
  1746.        } else {
  1747.            #Country List
  1748.            split($0,countries,"/") ;
  1749.            for (c in countries) {
  1750.                gsub(/^ +/,"",countries[c]);
  1751.                gsub(/ +$/,"",countries[c]);
  1752.                print "DBG: Country->" countries[c];
  1753.                if (mycountry(countries[c])) {
  1754.                    print "DBG: AKA" title2,countries[c];
  1755.                    print title2;
  1756.                    force_exit=1
  1757.                    exit 0;
  1758.                }
  1759.            }
  1760.        }
  1761.        count++;
  1762.    }
  1763. }
  1764. END {
  1765.    if (force_exit) exit 0;
  1766.    print "DBG: Fallback ",title;
  1767.    print title;
  1768. }
  1769. ' "$gTmpFile.aka"
  1770.     fi
  1771.     rm -f "$gTmpFile.aka"
  1772. }
  1773.  
  1774. #Input = none
  1775. #Output = imdb link to stdout
  1776. IMDB_EXTRACT_LINK_FROM_NFO() {
  1777.     sed -n '/imdb/ s/.*\(http[^ ]*imdb[a-zA-Z0-9/.:?&]*\).*/\1/p' *.nfo
  1778. }
  1779.  
  1780. #Input = imdb page to stdin
  1781. #Output = list of Genres to stdout. eg 'Action','Comedy','Drama'
  1782. IMDB_GET_GENRES() {
  1783.     awk -F'|' '
  1784.    
  1785.    #Find any line with href to Genre and a vertical bar.
  1786.    /^<a href="\/Sections\/Genre/ {
  1787.    
  1788.    #Remove the last hyperlink (to more keywords)
  1789.    gsub(/a> <a.*/,"a>") ;
  1790.  
  1791.    # Now get each field "<a href="/Sections/Genres/Drama/">Drama</a>" and extract the word after Genres.
  1792.    for (i=1;i<=NF;i++) {
  1793.         gsub(/.*"\/Sections\/Genres\//,"",$i);
  1794.         gsub(/\/.*/,"",$i) ;
  1795.          print $i
  1796.      }
  1797.  }'
  1798.  
  1799. }
  1800. #Input = $1 = filename containing imdb page
  1801. #Output = list of Certificates to stdout eg 'USA:R' , 'UK:PG'
  1802. IMDB_GET_CERTIFICATES() {
  1803.     if [ -z "$unpak_imdb_certificate_country_filter" ] ; then return 1 ; fi
  1804.     for country in $(echo "$unpak_imdb_certificate_country_filter" | sed 's/,/ /g') ; do
  1805.         if grep 'List?certificates' "$1" | sed 's/.*certificates=//;s/&&.*//;s/%20/ /g;s/:/-/g;' | grep "$country" ; then
  1806.             return 0
  1807.         fi
  1808.     done
  1809.     return 1
  1810. }
  1811.  
  1812. #######################################################################
  1813. # JOB CONTROL
  1814. #nzbget queues jobs in order but sometimes it's useful to let short jobs
  1815. #jump the queue. So we need some kind of job control.
  1816. #It's not perfect and two jobs may start at the same time!
  1817. #TODO: Not implemented
  1818. #######################################################################
  1819. job_file="/mnt/HD_a2/tmp/unpak.job"
  1820. JOB_WAIT() {
  1821.  
  1822.     this_pid="$1"
  1823.  
  1824.     this_wait=0
  1825.     wait_inc=10
  1826.     max_wait=$(( $unpak_maximum_par_repair_minutes*2*60*60 ))
  1827.     locked=0
  1828.     while [ $locked -eq 0 -a $this_wait -lt $max_wait ] ; do
  1829.         if [ ! -f "$job_file" ] ; then
  1830.             if JOB_LOCK "$this_pid" ; then return 0 ; fi
  1831.         else
  1832.             that_pid=$(cat $job_file)
  1833.             if [ "$that_pid" -eq "$this_pid" ] ; then return 0 ; fi
  1834.             if [ ! -d /proc/$that_pid ] ; then
  1835.                 #Process gone
  1836.                 if JOB_LOCK "$this_pid" ; then return 0 ; fi
  1837.             fi
  1838.         fi
  1839.         sleep $wait_inc
  1840.         this_wait=$(( $this_wait + $wait_inc ))
  1841.     done
  1842.     return 1
  1843. }
  1844.  
  1845. JOB_LOCK() {
  1846.     this_pid="$1"
  1847.     echo "$this_pid" > "$job_file"
  1848.     [ "$(cat $job_file)" = "$this_pid" ]
  1849. }
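# Sketch of the intended (not yet wired-in) usage, purely illustrative:
#   JOB_WAIT "$$" || exit 1   # wait our turn, give up after max_wait
#   ...do the unrar/par work...
#   rm -f "$job_file"         # hypothetical cleanup once done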
  1850.  
  1851. ###################################################################################
  1852. # GENRE CONTENT FUNCTIONS
  1853. ###################################################################################
  1854. # Create Genre Folder. This will have sub folders based on Genre
  1855. # Action/Drama etc. and Certification UK:PG etc.
  1856.  
  1857. ###################################################################################
  1858. # RECENT CONTENT FUNCTIONS
  1859. ###################################################################################
  1860.  
  1861. # Create two folders $unpak_recent_dir and $unpak_recent_dir2
  1862. # These will contain hard links to recently downloaded files.
  1863. # Each time the script runs it will adjust the contents of these folders.
  1864. #
  1865. # If the user has already deleted some original content, then the
  1866. # corresponding hard links are deleted. (This is done by tracking the number
  1867. # of hard links via 'ls -l'.)
  1868. #
  1869. # If the user deletes the Recent link then delete the Original Content
  1870. # TODO: This should be optional.
  1871.  
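# For illustration (names hypothetical): after
#   ln "/share/Tv/Show/ep.avi" "RECENT/08.10.09-ep-[Tv].avi"
# both names share one inode; deleting the original leaves the RECENT entry with a
# link count of 1, which LAST_LINK spots so the stale link can be removed.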
  1872. # $1 = category if any
  1873. CREATE_RECENT_LINKS() {
  1874.  
  1875.     if [ "$unpak_recent_age" -eq 0 ] ; then return 0 ; fi
  1876.  
  1877.     #We dont want stuff in PIN:FOLDERS appearing in Recent folders.
  1878.     if IS_SUBDIR "$PWD" "$unpak_nmt_pin_root" ; then
  1879.         return 0;
  1880.     fi
  1881.  
  1882.  
  1883.     unpak_recent_dir=$( CREATE_FOLDER_RELATIVE_TO_DESTDIR "$unpak_recent_dir" )
  1884.     DEBUG "unpak_recent_dir=($unpak_recent_dir)"
  1885.  
  1886.     if [ "$unpak_recent_age2" -gt 0 ] ; then
  1887.         DEBUG "unpak_recent_dir2=($unpak_recent_dir2)"
  1888.         unpak_recent_dir2=$( CREATE_FOLDER_RELATIVE_TO_DESTDIR "$unpak_recent_dir2" )
  1889.         DEBUG "unpak_recent_dir2=($unpak_recent_dir2)"
  1890.     fi
  1891.  
  1892.     #Convert .ext1,.ext2,.ext3 to (\.ext1|\.ext2|\.ext3)$ for egrep
  1893.     name_clause="$( CSV_TO_EGREP "$unpak_recent_extensions")$"
  1894.     #Create duplicate hard links to all new files
  1895.     d="$PWD"
  1896.     tag="[$(echo "$1" | sed 's,/,-,g')]"
  1897.     date="$(date '+%y.%m.%d')"
  1898.     INFO "Creating hard links from [$d] using tag = [$tag]"
  1899.     #(cd "$unpak_recent_dir" ; find "$d" -type f | egrep -i "$name_clause" | EXEC_FILE_LIST "touch '\1' ; ln '\1' './$tag-\3'" "" )
  1900.     if [ -f "$gTmpFile.moved_files" ] ; then
  1901.         (cd "$unpak_recent_dir" ; egrep -i "$name_clause" "$gTmpFile.moved_files"  | EXEC_FILE_LIST "touch '\1' ; ln '\1' './${date}-\3-${tag}\4'" "" )
  1902.     fi
  1903.  
  1904.     RECENT_MOVE_OR_DELETE "$unpak_recent_age" "$unpak_recent_dir" "$unpak_recent_age2" "$unpak_recent_dir2"
  1905.     RECENT_MOVE_OR_DELETE "$unpak_recent_age2" "$unpak_recent_dir2" "" ""
  1906. }
  1907.  
  1908. CSV_TO_EGREP() {
  1909.     echo "$1" | sed 's/^/(/;s/,/|/g;s/\./\\./g;s/$/)/'
  1910. }
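# Example: CSV_TO_EGREP ".avi,.mkv" prints "(\.avi|\.mkv)"; the caller appends "$"
# to turn it into an egrep filter on file extensions.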
  1911.  
  1912. RECENT_MOVE_OR_DELETE() {
  1913.     age="$1"
  1914.     from="$2"
  1915.     age2="$3"
  1916.     to="$4"
  1917.  
  1918.     DEBUG "RECENT age=$age from [$from]  to=$to arg2=$age2"
  1919.  
  1920.     if [ -z "$age" -o "$age" -eq 0 ] ; then return 0 ; fi
  1921.  
  1922.     DEBUG "REMOVE HARD LINKS"
  1923.     #Remove hard links if original file is gone : ie with only one link ref in ls -l
  1924.     (cd "$from" ; LAST_LINK | EXEC_FILE_LIST "rm -f '\1'" "" )
  1925.  
  1926.     if [ -n "$age2" -a "$age2" -gt 0 -a -n "$to" ] ; then
  1927.         #Move older files
  1928.         cmd="mv '\1' '$to/.'"
  1929.     else
  1930.         #Delete older files
  1931.         cmd="rm -f '\1'"
  1932.     fi
  1933.     DEBUG "RECENT MANAGE HARD_LINKS from [$from] age=$age [$cmd]"
  1934.     (cd "$from" ; find . -type f -mtime +$age | EXEC_FILE_LIST "$cmd" "" )
  1935. }
  1936.  
  1937. ############################################################################
  1938. # PIN FOLDER HACK
  1939. # If a category begins with 'PIN:FOLDER' then replace that with the path
  1940. # to the pin folder. This is simply a folder buried in a hierarchy of
  1941. # similarly named folders. The path to the folder is defined by
  1942. # $unpak_nmt_pin_root and the $unpak_nmt_pin
  1943. ############################################################################
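# Rough example (pin and root values are hypothetical): with unpak_nmt_pin_root=/mnt/HD_a2/locked
# and unpak_nmt_pin=2468, a category of "$unpak_nmt_pin_flag/Movies" is rewritten by
# NMT_SUBSTITUTE_PIN_FOLDER to /mnt/HD_a2/locked/2/4/6/8/Movies, and NMT_MAKE_PIN_FOLDER
# creates the decoy sibling folders on first use.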
  1944.  
  1945. NMT_MAKE_PIN_FOLDER() {
  1946.     INFO "CREATING PIN FOLDER"
  1947.     folders="1 2 3 4 5 6 7 8 9"
  1948.     start="$PWD"
  1949.  
  1950.     #Make the target folder.
  1951.     mkdir -p "$nmt_pin_path"
  1952.  
  1953.     #We create some dummy folders. Symlinks would have been perfect here
  1954.     # as they would create unlimited depth, but unfortunately
  1955.     #they don't show up in the NMT browser.
  1956.     #so we only create a subset of possible combinations. (to conserve disk space)
  1957.     cd "$nmt_pin_path"
  1958.     last_digit=1
  1959.     while [ ! "$PWD" -ef "$unpak_nmt_pin_root" -a "$PWD" != "/" ] ; do
  1960.         cd ..
  1961.         if [ $(ls | LINE_COUNT) -le 1 ] ; then
  1962.             mkdir -p $folders
  1963.             if [ $last_digit -eq 0 ]; then
  1964.                 # Create some more dummy folders in 'cousin' folders of correct letters.
  1965.                 for i in $folders ; do
  1966.                     (cd $i ; mkdir -p $folders ; cd .. )
  1967.                 done
  1968.             fi
  1969.         fi
  1970.         last_digit=0
  1971.     done
  1972.     chmod -R a+rw "$unpak_nmt_pin_root"
  1973.     cd "$start"
  1974.     INFO "DONE CREATING PIN FOLDER"
  1975. }
  1976.  
  1977. #Output = Pin Folder substituted
  1978. #0=substituted 1=not substituted
  1979. NMT_SUBSTITUTE_PIN_FOLDER() {
  1980.     d="$1"
  1981.  
  1982.     case "$1" in
  1983.         $unpak_nmt_pin_flag*)
  1984.             #Convert 2468 to /pin/path/2/4/6/8/
  1985.             nmt_pin_path="$unpak_nmt_pin_root/"$(echo $unpak_nmt_pin | sed 's/\(.\)/\/\1/g')  
  1986.             DEBUG "PIN FOLDER IN [$1] s*^$unpak_nmt_pin_flag*$nmt_pin_path*"
  1987.  
  1988.             #Convert $unpak_nmt_pin_flag/a/b/c to /pin/path/2/4/6/8/a/b/c
  1989.             new_d=$(echo "$1" | sed "s*^$unpak_nmt_pin_flag*$nmt_pin_path*")
  1990.             if [ "$new_d" != "$1" ] ; then
  1991.                 if [ ! -d "$nmt_pin_path" ] ; then
  1992.                     ( NMT_MAKE_PIN_FOLDER )
  1993.                 fi
  1994.                 echo "$new_d"
  1995.                 return 0
  1996.             fi
  1997.             ;;
  1998.         *)
  1999.             ;;
  2000.     esac
  2001.     echo "$1"
  2002.     return 1
  2003. }
  2004.  
  2005. CREATE_FOLDER_RELATIVE_TO_DESTDIR() {
  2006.     ( cd "$nzbget_DestDir" ; mkdir -p "$1" ; cd "$1" ; pwd )
  2007. }
  2008.  
  2009.  
  2010. #List files in the current folder that have only 1 link left (ie the original copy has been deleted)
  2011. LAST_LINK() {
  2012.     ls -l | awk '$2 == 1 { gsub(/[^:]+:.. /,"") ; print }'
  2013. }
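# Illustration of what LAST_LINK keys on (line is made up); column 2 of ls -l is the
# hard-link count, and a count of 1 means the original copy has gone:
#   -rw-rw-rw-    1 root  root  733421568 Oct  9 12:00 show.s01e02.avi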
  2014.  
  2015. # Pass a list of files to some command '\1' is the file path \2=folder \3=name(without ext) \4=ext
  2016. # $2 = any shell options or "--" if none
  2017. # Leaving $2 unquoted allows ""
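# Hedged usage sketch (path is hypothetical):
#   echo "/mnt/HD_a2/done/show.avi" | EXEC_FILE_LIST "ln '\1' './copy-\3\4'" ""
# generates and runs: ln '/mnt/HD_a2/done/show.avi' './copy-show.avi'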
  2018. EXEC_FILE_LIST() {
  2019.     sep=":"
  2020.     sep2=";"
  2021.     dir="(|.*\/)"
  2022.     nameExt="([^/]+)(\.[^./]*)"
  2023.     nameNoExt="([^./]+)()" #Note must anchor with '$' when used otherwise will match extensions.
  2024.     case "$1" in *$sep*) sep="$sep2" ;; esac
  2025.     cat > $gTmpFile.exec
  2026.  
  2027.     sed -rn "s$sep^($dir$nameExt)\$$sep$1${sep}p" $gTmpFile.exec > "$gTmpFile.sh"
  2028.     sed -rn "s$sep^($dir$nameNoExt)\$$sep$1${sep}p" $gTmpFile.exec >> "$gTmpFile.sh"
  2029.  
  2030.     if [ $unpak_debug_mode -eq 1 ] ; then
  2031.         DEBUG "BEGIN FILE LIST for $1 : $2"
  2032.         LOGSTREAM INFO "sh-file" < $gTmpFile.exec
  2033.         LOGSTREAM INFO "sh-cmd" < $gTmpFile.sh
  2034.         rm -f $gTmpFile.exec
  2035.     fi
  2036.  
  2037.     ( echo 'set -e ' ; cat $gTmpFile.sh ) | sh $2
  2038.  
  2039.     rm -f $gTmpFile.sh
  2040. }
  2041. NO_RARS() { ! ls *.rar > /dev/null 2>&1 ; }
  2042. SET_PASS() { gPass=$1 ; INFO "PASS $1" ; }
  2043.  
  2044. #Uses PHP
  2045. NMT_CRC32() {
  2046.     if [ -f "$unpak_php_bin" ] ; then
  2047.         INFO "$unpak_php_bin"
  2048.         echo '<?php print ("xx".dechex(crc32(file_get_contents("'"$1"'")))) ?>' | "$unpak_php_bin" | sed -n '/^xx/ s/xx//p'
  2049.         return 0
  2050.     else
  2051.         ERROR "$unpak_php_bin not found"
  2052.         echo 0
  2053.         return 1
  2054.     fi
  2055. }
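# Possible usage (output value made up): crc=$(NMT_CRC32 "some.file") prints a hex
# string such as 9f3a2c41, or "0" with a non-zero status if $unpak_php_bin is missing.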
  2056.  
  2057.  
  2058.  
  2059. ##################################################################################
  2060. #Some global settings
  2061. finished_nzb_ext=".completed"
  2062. gTmpFile="/tmp/unpak.$$"
  2063. flatten="=@="
  2064. ##################################################################################
  2065. # MAIN SCRIPT
  2066. ##################################################################################
  2067. MAIN() {
  2068.     # Make a logfile
  2069.  
  2070.  
  2071.     INFO "SCRIPT_FOLDER=[$SCRIPT_FOLDER] PIN $unpak_nmt_pin"
  2072.  
  2073.     if [ $unpak_pause_nzbget -eq 1 ] ; then
  2074.         PAUSE_NZBGET
  2075.     fi
  2076.  
  2077.  
  2078.     LOG_ARGS "$@"
  2079.  
  2080.     NZB_NICE_NAME=$(GET_NICE_NZBNAME "$arg_nzb_file")
  2081.  
  2082.     CLEAR_FAKEFILES ""
  2083.     CLEAR_TMPFILES
  2084.  
  2085.     #Only run at the end of nzbjob
  2086.     if [ "$arg_nzb_state" -ne 1 ] ; then
  2087.         exit
  2088.     fi
  2089.  
  2090.     INFO
  2091.     INFO " ====== Post-process Started : $NZB_NICE_NAME $(date '+%T')======"
  2092.  
  2093.     CHECK_SETTINGS || exit 1
  2094.  
  2095.     if [ "$arg_par_fail" -ne 0 ] ; then
  2096.         ERROR "Previous par-check failed, exiting"
  2097.         NMT_FAKEFILE ERR "PAR Previous par-check failed"
  2098.         exit 1
  2099.     fi
  2100.  
  2101.     case "$arg_par_check" in
  2102.         0)
  2103.             if [ -f _brokenlog.txt -a "$external_par_check" -ne 1 ] ; then
  2104.                 ERROR "par-check is disabled or no pars present, but a rar is broken, exiting"
  2105.                 NMT_FAKEFILE ERR "UNRAR is broken"
  2106.                 exit 1
  2107.             fi
  2108.             ;;
  2109.        1) ERROR "par-check failed, exiting"
  2110.           NMT_FAKEFILE ERR "PAR check failed"
  2111.           exit 1 ;;
  2112.        2) true ;; # Checked and repaired.
  2113.        3) WARNING "Par can be repaired but repair is disabled, exiting"
  2114.           NMT_FAKEFILE WARN "PAR repair disabled"
  2115.           exit 1 ;;
  2116.     esac
  2117.  
  2118.     VERSION_CHECK
  2119.  
  2120.     gUnrarState="OK"
  2121.        
  2122.     if [ "$arg_par_check" -eq 0 -a "$external_par_check" -eq 1 ] ; then
  2123.         if ! WAITING_FOR_PARS ; then
  2124.             SET_PASS 1
  2125.             #First pass. Try to unrar.
  2126.             INFO "$SCRIPT_NAME : PASS 1"
  2127.             INIT_ALL_RAR_STATES
  2128.             if NO_RARS || ! UNRAR_ALL ; then
  2129.                 if ! UNPAUSE_PARS_AND_REPROCESS ; then
  2130.                     SET_PASS 2
  2131.                     # Cannot unpause or reprocess. Try to fix now..
  2132.                     PAR_REPAIR_ALL && UNRAR_ALL || true
  2133.                 fi
  2134.             fi
  2135.         else
  2136.             SET_PASS 2
  2137.             INFO "$SCRIPT_NAME : PASS 2"
  2138.             #Second pass. Now pars have been fetched try to repair and unrar
  2139.             CLEAR_WAITING_FOR_PARS
  2140.             PAR_REPAIR_ALL && UNRAR_ALL || true
  2141.         fi
  2142.     else
  2143.         SET_PASS 0
  2144.         #script is not processing pars. (no pars or nzbget has repaired already)
  2145.         INIT_ALL_RAR_STATES
  2146.         UNRAR_ALL || true
  2147.     fi
  2148.     chmod -R a+rw . || true
  2149.     # No logging after this point as folder is moved.
  2150.     if [ $unpak_pause_nzbget -eq 1 ] ; then
  2151.         UNPAUSE_NZBGET
  2152.     fi
  2153.     cat=
  2154.     if ! WAITING_FOR_PARS ; then
  2155.         if [ "$gUnrarState" = "OK" -a -n "$unpak_completed_dir" -a "$nzbget_AppendNzbDir" = "yes" ] ; then
  2156.             RELOCATE
  2157.         fi
  2158.     fi
  2159.     INFO "BEGIN Created Recent Links PWD=[$PWD]"
  2160.     CREATE_RECENT_LINKS "$cat"
  2161.     LOG INFO END Created Recent Links
  2162.     LOG_END "$gUnrarState"
  2163.     LOG INFO THE END
  2164. }
  2165. ###################### Parameters #####################################
  2166.  
  2167. # Parameters passed to script by nzbget:
  2168. #  1 - path to destination dir, where downloaded files are located;
  2169. #  2 - name of nzb-file processed;
  2170. #  3 - name of par-file processed (if par-checked) or empty string (if not);
  2171. #  4 - result of par-check:
  2172. #      0 - not checked: par-check disabled or nzb-file does not contain any
  2173. #          par-files;
  2174. #      1 - checked and failed to repair;
  2175. #      2 - checked and successfully repaired;
  2176. #      3 - checked and can be repaired but repair is disabled;
  2177. #  5 - state of nzb-job:
  2178. #      0 - there are more collections in this nzb-file queued;
  2179. #      1 - this was the last collection in nzb-file;
  2180. #  6 - indication of failed par-jobs for current nzb-file:
  2181. #      0 - no failed par-jobs;
  2182. #      1 - current par-job or any of the previous par-jobs for the
  2183. #          same nzb-files failed;
  2184. # Check if all is downloaded and repaired
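# Illustrative call as nzbget would make it (values hypothetical):
#   unpak.sh "/mnt/HD_a2/download/Some.Show" "Some.Show.nzb" "" 2 1 0
# ie dir, nzb name, par name, par-check=2 (repaired), last collection, no failed par-jobs;
# a 7th argument, if given, is used as the category.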
  2185.  
  2186.  
  2187. TEST() {
  2188. SET_DEFAULT_SETTINGS
  2189. for i in \
  2190. "http://www.imdb.com/title/tt0060196" \
  2191. http://www.imdb.com/title/tt0167260 http://www.imdb.com/title/tt0962726/ \
  2192. http://www.imdb.com/title/tt0338512 http://www.imdb.com/title/tt0325710/ \
  2193. http://www.imdb.com/title/tt0468569 http://www.imdb.com/title/tt1034331/ \
  2194. http://www.imdb.com/title/tt0412142 http://www.imdb.com/title/tt1213574/ \
  2195. http://www.imdb.com/title/tt0446059 ; do
  2196.     wget -qO x.html "$i"
  2197.     IMDB_EXTRACT_TITLE "$i" x.html
  2198. done
  2199. exit
  2200. }
  2201.  
  2202. if [ "$#" -lt 6 ]
  2203. then
  2204.     echo "*** NZBGet post-process script ***"
  2205.     echo "This script is supposed to be called from nzbget."
  2206.     echo "usage: $0 dir nzbname parname parcheck-result nzb-job-state failed-jobs"
  2207.     #exit
  2208. fi
  2209.  
  2210. arg_download_dir="$1"  
  2211. arg_nzb_file="$2"
  2212. arg_par_check="$4"
  2213. arg_nzb_state="$5"  
  2214. arg_par_fail="$6"
  2215. arg_category="${7:-}"
  2216.  
  2217.  
  2218. SCRIPT_NAME=$(BASENAME "$0" "")
  2219.  
  2220. SCRIPT_FOLDER=$( cd $(DIRNAME "$0") ; pwd )
  2221.  
  2222. SET_DEFAULT_SETTINGS
  2223.  
  2224. MERGE_UNPAK_SETTINGS
  2225.  
  2226. cd "$arg_download_dir"
  2227.  
  2228. #AUTO_CATEGORY_FROM_IMDB ; exit # TODO DELETE
  2229.  
  2230. #TODO DELETE
  2231. #mkdir -p "$arg_download_dir.2"
  2232. #ln * "$arg_download_dir.2/."
  2233.  
  2234. MAIN "$@" 2>&1 | TEE_LOGFILES unpak.txt unpak.html
  2235. if CHECK_UNRAR_STATE ; then
  2236.   rm -f unpak.html
  2237. fi
  2238.  
  2239. #
  2240. # vi:shiftwidth=4:tabstop=4:expandtab
  2241. #
  2242. # Return status to nzbget
  2243. #  93 - post-process successful (status = SUCCESS);
  2244. #  94 - post-process failed (status = FAILURE);
  2245.  
  2246. POSTPROCESS_STATUS=0
  2247.  
  2248.     if CHECK_UNRAR_STATE ; then
  2249.         POSTPROCESS_STATUS=93
  2250.         exit $POSTPROCESS_STATUS
  2251.     else
  2252.         POSTPROCESS_STATUS=94
  2253.         exit $POSTPROCESS_STATUS
  2254.     fi
  2255.  
  2256. exit $POSTPROCESS_STATUS