import os.path
import datetime
import argparse
from scripts.conversion_news import read_news
from scripts.conversion_news import unpack_json
from scripts.conversion_news import unpack_news
from scripts.conversion_news import unpack_with_date
from scripts.create_xml import create_fb2
from scripts.rss_parser import find_news
from scripts.write_logs import write_log
import logging


def valid_date(date):
    try:
        return datetime.datetime.strptime(date, '%Y%m%d')
    except ValueError:
        # s? but we never stored the date anywhere, shouldn't this be date?  # Yes, I copied it sloppily from the internet
        msg = "Not a valid date: '{0}'.".format(date)
        # didn't even read about the argparse error :( right, through the dot; it's more convenient without the dots
        raise argparse.ArgumentTypeError(msg)


def parse_arguments():
    parser = argparse.ArgumentParser(description='Pure Python command-line RSS reader')
    parser.add_argument('source', help='RSS URL', nargs='?')
    parser.add_argument('--verbose', action='store_true', help='Outputs verbose status messages')
    parser.add_argument('--version', action='version', version='%(prog)s 0.4', help='Print version info')
    parser.add_argument('--json', action='store_true', help='Print result as JSON in stdout')
    parser.add_argument('--limit', type=int, help='Limit topics if this parameter provided')
    # Note the type: argparse should validate the value itself, so args.date is
    # already a datetime and you can get the year right away via args.date.year.
    parser.add_argument('--date', type=valid_date,
                        help='Print news by publication date')
    parser.add_argument('--output-path', dest='output_path', type=str, help='Path to save fb2')
    parser.add_argument('--to-fb2', dest='to_fb2', action='store_true', help='Convert news to fb2')

    return parser.parse_args()


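def _example_date_argument():
    # Illustration only, not part of the original paste: a minimal sketch of the
    # type=valid_date behaviour noted in the comment above. argparse applies
    # valid_date to the raw string, so args.date is already a datetime object.
    # The function name and the argument list are hypothetical.
    parser = argparse.ArgumentParser()
    parser.add_argument('--date', type=valid_date)
    args = parser.parse_args(['--date', '20191212'])
    return args.date.year  # -> 2019

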
def validate_arguments(args):
    if args.limit is not None and args.limit < 0:
        print('Limit must be a non-negative number')
        return False
    if args.json and args.output_path is not None:
        print('ArgumentsError: You should use --json without --output-path')
        return False
    if args.source is not None and args.source == '':
        print('You should give the rss_url')
        return False
    if args.output_path and not args.to_fb2:
        print('You should use --output-path with --to-fb2')
        return False
    return True


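# Dispatch logic below: when the positional source is the literal string 'date',
# news is read back from the local cache (optionally filtered by --date);
# otherwise it is fetched from the given RSS URL. The result is then either
# converted to fb2 (--to-fb2) or printed to stdout, as JSON if --json is set.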
def process_arguments(args):
    news = None
    title = None
    max_limit = None
    if args.source == 'date':
        if args.date:
            news, max_limit = retrieve_from_cache(args.date)
        if args.verbose:
            logging.debug('args=%s news=%s max_limit=%s', args, news, max_limit)
    else:
        try:
            news, title, max_limit = retrieve_from_url(args.source)
        except TypeError:
            pass
    if args.to_fb2:
        dump_news_to_fb2(news, args.output_path)
        if args.verbose:
            logging.debug('args=%s news=%s max_limit=%s', args, news, max_limit)
    else:
        dump_news_to_stdout(news, args.limit, max_limit, as_json=args.json, title=title)
        if args.verbose:
            logging.debug('args=%s news=%s max_limit=%s', args, news, max_limit)


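# Note on the helper below: when --limit exceeds the number of items actually
# available (max_limit), only the available count is reported to stdout;
# otherwise the first `limit` items (or all of them when no limit is given)
# are unpacked, as JSON or as plain text depending on the --json flag.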
def dump_news_to_stdout(news, limit, max_limit, as_json=None, title=None):
    if as_json:
        if limit:
            if max_limit < limit:
                print('{0} news on site'.format(max_limit))
            else:
                unpack_json(news, limit)
        else:
            unpack_json(news, max_limit)
    else:
        if limit:
            if max_limit < limit:
                print('{0} news on site'.format(max_limit))
            else:
                unpack_news(news, limit, title)
        else:
            unpack_news(news, max_limit, title)


def dump_news_to_fb2(news, output_path):
    create_fb2(news, output_path)


def retrieve_from_url(url):
    try:
        news, title = find_news(url)
    except TypeError:
        # Fall through and implicitly return None; the caller catches the
        # TypeError raised when unpacking the result.
        pass
    else:
        max_limit = len(news)
        return news, title, max_limit


def retrieve_from_cache(date=None):
    first_news = read_news()
    max_limit = len(first_news)
    if date:
        news, limit = unpack_with_date(user_date=date, news=first_news)
        return news, limit
    else:
        return first_news, max_limit


def retrieve_json(url, limit=None):
    NEWS_INDEX = 0
    try:
        news = dict(find_news(url)[NEWS_INDEX])
        max_limit = len(news)
    except TypeError:
        pass
    else:
        if limit:
            news = unpack_json(news, limit)
            max_limit = len(news)
            return news, max_limit
        else:
            max_limit = len(news)
            return news, max_limit


def retrieve_logs():
    LOGS_FILE_NAME = 'logs.txt'
    with open(LOGS_FILE_NAME, 'r') as file:
        data = file.read()
    print(data)


def conversion_to_fb2_with_url(url, path=None):
    NEWS_INDEX = 0
    news = dict(find_news(url)[NEWS_INDEX])
    return news, path


def main():
    args = parse_arguments()
    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    if not validate_arguments(args):
        return -1
    process_arguments(args)


if __name__ == '__main__':
    main()
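

# Example invocations (illustration only, not part of the original paste; the
# module's file name is not given here, so "rss_reader.py" is a hypothetical name):
#
#   python rss_reader.py <rss-url> --limit 5
#   python rss_reader.py <rss-url> --json
#   python rss_reader.py <rss-url> --to-fb2 --output-path <path-to-fb2>
#   python rss_reader.py date --date 20191212 --verbose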