Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- #!/usr/bin/python
- import json
- import psycopg2
- import sys
- import os
# Module-level counter: number of log lines seen so far that contained
# exactly one productId (incremented by watch_for_single_product_id).
glob_single_id_found_counter = 0
def write_to_database(order_id, product_id):
    """Insert (order_id, product_id) into my_table if orderId is not yet present.

    Args:
        order_id: 12-character order identifier (str).
        product_id: 12-character product identifier (str).

    Connection details are hard-coded for the local debug database.
    Prints a confirmation on insert; prints the error on failure.
    """
    assert type(order_id) is str, "order_id var in sql function must be a string."
    assert type(product_id) is str, "product_id var in sql function must be a string."
    # Ask if ids are all same length ?
    assert len(order_id) == 12, "order_id var in sql function must be 12 chars."
    # BUG FIX: this assertion message previously said "order_id" although it
    # checks product_id.
    assert len(product_id) == 12, "product_id var in sql function must be 12 chars."
    # Pre-bind so the finally-block can safely test what was actually opened.
    conn = None
    cursor = None
    try:
        # BUG FIX: the original adjacent literals concatenated without
        # separating whitespace ("...localhost'dbname=..."), which the libpq
        # keyword/value connection-string parser rejects.
        conn_data = ("host='localhost' "
                     "dbname='postgres' "
                     "user='postgres' "
                     "password='123456'")
        conn = psycopg2.connect(conn_data)
        cursor = conn.cursor()
        # Parameterized queries (no string-built SQL).
        cursor.execute("select orderId from my_table where orderId=%s;", (order_id,))
        row_exist = cursor.fetchall()
        if not row_exist:
            cursor.execute("INSERT INTO my_table VALUES (%s, %s)", (order_id, product_id,))
            print("was added to the database.")
        conn.commit()
    except Exception as Error:
        print(Error)
        print("Unable to connect to the database.")
    finally:
        # BUG FIX: the original error path called cursor.close()/conn.close()
        # even when connect() failed before those names were bound (NameError),
        # and the success path leaked nothing only if no exception occurred.
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()
def watch_for_single_product_id(line):
    """Parse one log line; record it if it references exactly one productId.

    Args:
        line: raw log line (str). Non-string or empty input is rejected.

    Returns:
        True when the line contained a single productId (it is then printed,
        counted, and passed to write_to_database); False otherwise.
    """
    # Guard clause: reject non-string input (the original only checked int)
    # and empty lines up front.
    if not isinstance(line, str) or len(line) == 0:
        return False
    global glob_single_id_found_counter
    # decode string-line to json: the log embeds a Python-repr dict with
    # u'...' strings, so normalise the quoting before json.loads.
    try:
        payload = line.split("{")[1].split("}")[0]
        payload = payload.replace("u'", '"').replace("'", '"')
        record = json.loads("{" + payload + "}")
        order_id = record["orderId"]
        product_ids = record["productId"]
    # BUG FIX: the original caught only IndexError; a brace-delimited span
    # that is not valid JSON raised json.JSONDecodeError (a ValueError), and
    # a payload missing the expected keys raised KeyError.
    except (IndexError, ValueError, KeyError):
        # line does not fit to our decoder
        return False
    # check for single productId
    if len(product_ids) == 1:
        print("\norderId:\t", order_id)
        print("productId:\t", product_ids[0])
        glob_single_id_found_counter += 1
        # pass values to sql function
        write_to_database(order_id, product_ids[0])
        return True
    return False
def open_log(files):
    """Scan each log file in *files* for lines with a single productId.

    Args:
        files: list of file paths (typically sys.argv[1:]).

    Returns:
        True when scanning completed; False when no files were given.
    """
    global glob_single_id_found_counter
    # Robustness: the original raised IndexError on files[0] for an empty list.
    if not files:
        print("\nno input files given")
        return False
    # BUG FIX: the original multi-file branch printed every filename but
    # re-read files[0] on each iteration instead of opening each file.
    for path in files:
        print("\ninput:", path)
        # Context manager ensures the file handle is closed (the original
        # leaked every handle it opened).
        with open(path) as handle:
            for line in handle:
                watch_for_single_product_id(line)
    print("\nsingle productIds found:", glob_single_id_found_counter)
    return True
def test_watch_for_single_product_id():
    """A well-formed line with exactly one productId must parse as True;
    junk text, a non-string, and an empty string must all be rejected."""
    good_test_line = """"2015-09-17 11:00:09,621 - adnymics.BrochureGeneration.JobGenerator - DEBUG: Checking args for (re-)generating brochure: {u'orderId': u'123456789000', u'productId': [u'123456789003']}"""""
    # Inputs the decoder must reject: free text, an int, an empty string.
    bad_test_lines = (
        "something something",
        123456789,
        "",
    )
    assert watch_for_single_product_id(good_test_line) == True
    for line in bad_test_lines:
        assert watch_for_single_product_id(line) == False
def test_open_log(tmpdir):
    """Write a two-line sample log into pytest's tmpdir fixture and check it
    reads back as a string (the open_log round-trip below is still disabled)."""
    p = tmpdir.join("data.log")
    p.write("""
"2015-09-17 11:00:09,621 - adnymics.BrochureGeneration.JobGenerator - DEBUG: Checking args for (re-)generating brochure: {u'orderId': u'123456789000', u'productId': [u'123456789001', u'123456789002', u'123456789003']}"
"2015-09-17 11:00:09,621 - adnymics.BrochureGeneration.JobGenerator - DEBUG: Checking args for (re-)generating brochure: {u'orderId': u'123456789000', u'productId': [u'123456789003']}"
""")
    assert type(p.read()) is str, "log is not a string: "
    #test = str(tmpdir)
    #file = str(os.path.join(test, 'data.log')) # relative path
    ##assert open_log(file) == True, "test"
# ---------
# SQL Debug
#
def show_sql():
    """Debug helper: print every (orderId, productId) row from my_table."""
    # Pre-bind so the finally-block can safely test what was actually opened.
    conn = None
    cursor = None
    try:
        # BUG FIX: same missing-whitespace literal concatenation as in
        # write_to_database -- libpq rejects "...localhost'dbname=...".
        conn_data = ("host='localhost' "
                     "dbname='postgres' "
                     "user='postgres' "
                     "password='123456'")
        conn = psycopg2.connect(conn_data)
        cursor = conn.cursor()
        cursor.execute("""SELECT orderId, productId FROM my_table;""")
        for row in cursor.fetchall():
            print(row)
    except Exception as Error:
        print(Error)
    finally:
        # BUG FIX: the original never closed the cursor or the connection,
        # leaking one connection per call.
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()
#
# SQL Debug
# ---------
if __name__ == "__main__":
    # Every command-line argument is treated as a log file path to scan.
    open_log(sys.argv[1:])
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement