initBasti / Amazon2PlentySync (public) (License: GPLv3) (since 2019-01-27) (hash sha1)
Transfer your data from your Amazon Flatfile spreadsheet over to the Plentymarkets system. How-to is included in the readme.
List of commits:
Subject Hash Author Date (UTC)
Improve consistent coding style 4ae1b7fc32c645bcdeac919e4d8f73983a3104ab Sebastian Fricke 2020-04-29 09:34:18
Remove fixed values for specific item settings 0f2452a1bb953efcbb797c5feeb2b8f35617f537 Sebastian Fricke 2020-04-29 08:38:52
Remove fixed fba values from upload 758ca263cf88343c55af70b1e7824c76d485d7f2 Sebastian Fricke 2020-04-29 08:32:09
Remove market assign & availability settings fdf971d45706f8c75d9a7ea7e949193c45620051 Sebastian Fricke 2020-04-29 07:31:36
Bug Fix: product-import.py Empty TK root window 0b087f8a7902523b2495c936a6e09447a93a6cb5 Sebastian Fricke 2020-04-28 09:41:04
Add ignore file for silver searcher tool 87a1b335af1f88c10ad12e5b8f59f8207c58aaec Sebastian Fricke 2020-04-28 09:25:54
add tests folder pycache to gitignore 9d7989d1658d3dffd7eb00e9411af3dec6c85563 Sebastian Fricke 2020-04-27 12:55:24
Unit test for find_price function c19fa28ad9bf0dd1b5361e871dc498f6704cb196 Sebastian Fricke 2020-04-27 12:53:20
Remove price calculation from script d852cb3ef648336ed94daeaaa220721e6055dd7c Sebastian Fricke 2020-04-27 12:50:52
Function description priceUpload 2c5735f4f4c79607f55c77b1359aa5984d1e6699 Sebastian Fricke 2020-04-23 09:34:30
similar attribute search regardless of the parent 4bb0970408d78d25264d479428fe8c3389483215 Sebastian Fricke 2020-04-23 09:28:19
Fix marking dropdown bug 0d1695083d2b4e49fd364a36a7ef3c92a192d62f Sebastian Fricke 2020-04-23 09:26:46
update desktop specific build script for windows 8a6536d6173f7383022fab92f234ab25fc81204b Sebastian Fricke 2020-04-22 10:15:05
Refactor config handling a9be950a4e8d97fa97c7e9caff33fcbb1f476d9d Sebastian Fricke 2020-04-22 10:11:57
Fix gui/category_chooser: file change bug 9879e65c9aad9b1feb05b6121a0e33c129a8beb5 Sebastian Fricke 2020-04-22 10:09:36
update .gitignore 6c7628af605a72ced1e146c27da8639225ab9c6c Sebastian Fricke 2020-04-22 10:08:47
Fix error.py: Fix colorful messages on windows 31f0b106c7aee1962bba3efb5758f647170eceff Sebastian Fricke 2020-04-22 10:07:30
Enhanced error output to extra module dda37a22d21db1af6330fd62395682b91f46f5ec Sebastian Fricke 2020-01-27 10:56:00
Introduction infoPrint, removed unnecessary parameter 98b6779f95fcf6d3350f64e7d2a8151932415458 Sebastian Fricke 2020-01-16 14:27:15
Add Cdiscount price & coding style review 7c5451b067760100904aed947b2ba484ab9c9e45 Sebastian Fricke 2020-01-16 14:25:15
Commit 4ae1b7fc32c645bcdeac919e4d8f73983a3104ab - Improve consistent coding style
improve the coding style and linting in item_upload.py
Author: Sebastian Fricke
Author date (UTC): 2020-04-29 09:34
Committer name: Sebastian Fricke
Committer date (UTC): 2020-04-29 09:34
Parent(s): 0f2452a1bb953efcbb797c5feeb2b8f35617f537
Signing key:
Tree: 26b95926f48b918b0b06b23f359bc5982b6c1844
File Lines added Lines deleted
packages/item_upload.py 155 178
File packages/item_upload.py changed (mode: 100644) (index 4e2b357..3a96ba1)
... ... import sys
3 3 import re import re
4 4 import collections import collections
5 5 import inspect import inspect
6 import chardet
7 6 import os import os
7 import chardet
8 8 from packages import barcode, amazon, price, error from packages import barcode, amazon, price, error
9 9
10 10
 
... ... def itemUpload(flatfile, intern, stocklist, folder, input_data, filename):
38 38 position = 0 position = 0
39 39 item_price = 0 item_price = 0
40 40
41 # Get sets of all colors and sizes for each parent
42 # to find if there are some with only one attribute value for all childs
43 41 color_size_sets = {} color_size_sets = {}
44 42 color_size_sets = findSimilarAttr(flatfile, ['size_name']) color_size_sets = findSimilarAttr(flatfile, ['size_name'])
45 43
46 44 package_properties = getProperties(flatfile) package_properties = getProperties(flatfile)
47 45 group_parent = '' group_parent = ''
48 46
49 try:
50 with open(flatfile['path'], mode='r', encoding=flatfile['encoding']) as item:
51 reader = csv.DictReader(item, delimiter=";")
47 with open(flatfile['path'], mode='r', encoding=flatfile['encoding']) as item:
48 reader = csv.DictReader(item, delimiter=";")
49
50 for row in reader:
51 try:
52 keywords = ''
53 if row['generic_keywords']:
54 keywords = row['generic_keywords']
55
56 if not keywords:
57 raise barcode.EmptyFieldWarning('generic_keywords')
58
59 item_price = row['standard_price']
60 if not item_price and row['parent_child'] == 'parent':
61 item_price = price.find_price(flatfile, row['item_sku'])
62 if item_price == -1:
63 if os.name == 'nt':
64 print("press ENTER to continue..")
65 input()
66 sys.exit(1)
67 if item_price == '':
68 error.warnPrint(
69 msg=str(f"{row['item_sku']}, has no price"), err='',
70 linenumber=inspect.currentframe().f_back.f_lineno)
71
52 72
53 for row in reader:
54 try:
55 keywords = ''
56 if row['generic_keywords']:
57 keywords = row['generic_keywords']
58
59 if not keywords:
60 try:
61 raise barcode.EmptyFieldWarning('generic_keywords')
62 except Exception:
63 error.warnPrint("Generic Keywords are empty!",
64 inspect.currentframe().f_back.f_lineno)
65
66 item_price = row['standard_price']
67 if not item_price and row['parent_child'] == 'parent':
68 item_price=price.find_price(flatfile, row['item_sku'])
69 if item_price == -1:
70 if os.name == 'nt':
71 print("press ENTER to continue..")
72 input()
73 exit(1)
74 if item_price == '':
75 error.warnPrint(
76 msg=str(f"{row['item_sku']}, has no price"), err='',
77 linenumber=inspect.currentframe().f_back.f_lineno)
78
79
80 try:
81 attributes = ''
82 if row['parent_child'] == 'parent':
83 is_parent = True
84 group_parent = row['item_sku']
85 position = 0
86 if row['parent_child'] == 'child':
87 is_parent = False
88 attributes = getAttributes(dataset=row,
89 sets=color_size_sets)
90 if(group_parent and row['parent_sku'] == group_parent):
91 position += 1
92 except Exception as err:
93 error.warnPrint("Attribute setting failed",
94 sys.exc_info()[2].tb_lineno, err)
95 try:
96 values = [
97 row['parent_sku'], row['item_sku'],
98 is_parent,
99 package_properties['length'] * 10,
100 package_properties['width'] * 10,
101 package_properties['height'] * 10,
102 package_properties['weight'],
103 row['item_name'], '104',
104 attributes, position,
105 keywords,
106 input_data['name'], row['product_description'],
107 '', # externalID
108 input_data['categories'],
109 input_data['categories'][0:3], input_data['categories'][0:3],
110 '', '', # barcode
111 '', '', # market & accout id amazonsku
112 '', '', # sku & parentsku amazonsku
113 amazon.get_producttype_id(source=flatfile,
114 sku=row['item_sku']),
115 item_price, # prices
116 '', '', '', #asin
117 input_data['marking']
118 ]
119
120 except KeyError as kerr:
121 error.warnPrint('itemUpload: key not found in flatfile',
122 inspect.currentframe().f_back.f_lineno,
123 err=kerr)
124 raise KeyError
125 except Exception as err:
126 error.errorPrint("itemUpload: setting values failed", err,
127 sys.exc_info()[2].tb_lineno)
128 data[row['item_sku']] = collections.OrderedDict(zip(column_names, values))
129 except KeyError as err:
130 error.errorPrint("Reading file failed", err,
131 sys.exc_info()[2].tb_lineno)
132 return row['item_sku']
133
134 # open the intern number csv to get the item ID
135 with open(intern['path'], mode='r', encoding=intern['encoding']) as item:
136 reader = csv.DictReader(item, delimiter=";")
137 for row in reader:
138 try:
139 if row['amazon_sku'] in list(data.keys()):
140 data[row['amazon_sku']]['ExternalID'] = row['full_number']
141 except KeyError as keyerr:
142 error.warnPrint("key was not found in intern number list",
143 sys.exc_info()[2].tb_lineno, keyerr)
144
145 # Include the barcodes & asin
146 barcode_data = barcode.barcode_Upload(flatfile, stocklist)
147
148 for row in barcode_data:
149 73 try: try:
150 if row in list(data.keys()):
151 data[row]['EAN_Barcode'] = barcode_data[row]['EAN_Barcode']
152 data[row]['FNSKU_Barcode'] = barcode_data[row]['FNSKU_Barcode']
153 data[row]['ASIN-countrycode'] = barcode_data[row]['ASIN-countrycode']
154 data[row]['ASIN-type'] = barcode_data[row]['ASIN-type']
155 data[row]['ASIN-value'] = barcode_data[row]['ASIN-value']
74 attributes = ''
75 if row['parent_child'] == 'parent':
76 is_parent = True
77 group_parent = row['item_sku']
78 position = 0
79 if row['parent_child'] == 'child':
80 is_parent = False
81 attributes = getAttributes(dataset=row,
82 sets=color_size_sets)
83 if(group_parent and row['parent_sku'] == group_parent):
84 position += 1
156 85 except Exception as err: except Exception as err:
157 error.errorPrint("Barcode part for "+row, err,
158 sys.exc_info()[2].tb_lineno)
86 error.warnPrint(msg="Attribute setting failed", err='',
87 linenumber=sys.exc_info()[2].tb_lineno)
88 try:
89 values = [
90 row['parent_sku'], row['item_sku'],
91 is_parent,
92 package_properties['length'] * 10,
93 package_properties['width'] * 10,
94 package_properties['height'] * 10,
95 package_properties['weight'],
96 row['item_name'], '104',
97 attributes, position,
98 keywords,
99 input_data['name'], row['product_description'],
100 '', # externalID
101 input_data['categories'],
102 input_data['categories'][0:3], input_data['categories'][0:3],
103 '', '', # barcode
104 '', '', # market & accout id amazonsku
105 '', '', # sku & parentsku amazonsku
106 amazon.get_producttype_id(source=flatfile,
107 sku=row['item_sku']),
108 item_price, # prices
109 '', '', '', #asin
110 input_data['marking']
111 ]
159 112
160 # Include the amazonsku
161 sku_data = amazon.amazonSkuUpload(flatfile)
113 except KeyError as kerr:
114 error.warnPrint(
115 msg='itemUpload: key not found in flatfile', err=kerr,
116 linenumber=inspect.currentframe().f_back.f_lineno)
117 raise KeyError
118 except Exception as err:
119 error.errorPrint(
120 msg="itemUpload: setting values failed", err=err,
121 linenumber=sys.exc_info()[2].tb_lineno)
122 data[row['item_sku']] = collections.OrderedDict(zip(column_names, values))
123 except KeyError as err:
124 error.errorPrint(msg="Reading file failed", err=err,
125 linenumber=sys.exc_info()[2].tb_lineno)
126 return row['item_sku']
162 127
163 for row in sku_data:
128 with open(intern['path'], mode='r', encoding=intern['encoding']) as item:
129 reader = csv.DictReader(item, delimiter=";")
130 for row in reader:
164 131 try: try:
165 if row in list(data.keys()):
166 data[row]['marketid'] = sku_data[row]['MarketID']
167 data[row]['accountid'] = sku_data[row]['MarketAccountID']
168 data[row]['amazon_sku'] = sku_data[row]['SKU']
169 data[row]['amazon_parentsku'] = sku_data[row]['ParentSKU']
170 except Exception as err:
171 error.errorPrint("SKU part for "+row, err,
172 sys.exc_info()[2].tb_lineno)
132 if row['amazon_sku'] in list(data.keys()):
133 data[row['amazon_sku']]['ExternalID'] = row['full_number']
134 except KeyError as keyerr:
135 error.warnPrint(
136 msg="key was not found in intern number list",
137 err=keyerr, linenumber=sys.exc_info()[2].tb_lineno)
138
139 barcode_data = barcode.barcode_Upload(flatfile, stocklist)
140
141 for row in barcode_data:
142 try:
143 if row in list(data.keys()):
144 data[row]['EAN_Barcode'] = barcode_data[row]['EAN_Barcode']
145 data[row]['FNSKU_Barcode'] = barcode_data[row]['FNSKU_Barcode']
146 data[row]['ASIN-countrycode'] = barcode_data[row]['ASIN-countrycode']
147 data[row]['ASIN-type'] = barcode_data[row]['ASIN-type']
148 data[row]['ASIN-value'] = barcode_data[row]['ASIN-value']
149 except Exception as err:
150 error.errorPrint(
151 msg=str(f"Barcode part for {row}"), err=err,
152 linenumber=sys.exc_info()[2].tb_lineno)
153
154 # Include the amazonsku
155 sku_data = amazon.amazonSkuUpload(flatfile)
173 156
174 # Sort the dictionary to make sure that the parents are the first variant of each item
175 sorted_data = sortProducts(data)
157 for row in sku_data:
158 try:
159 if row in list(data.keys()):
160 data[row]['marketid'] = sku_data[row]['MarketID']
161 data[row]['accountid'] = sku_data[row]['MarketAccountID']
162 data[row]['amazon_sku'] = sku_data[row]['SKU']
163 data[row]['amazon_parentsku'] = sku_data[row]['ParentSKU']
164 except Exception as err:
165 error.errorPrint(msg=str(f"SKU part for {row}"), err=err,
166 linenumber=sys.exc_info()[2].tb_lineno)
176 167
177 barcode.writeCSV(sorted_data, "item", column_names, folder, filename)
178 except UnicodeDecodeError as err:
179 error.errorPrint("decoding problem", err,
180 sys.exc_info()[2].tb_lineno)
181 if os.name == 'nt':
182 print("press ENTER to continue..")
183 input()
184 exit(1)
168 # Sort the dictionary to make sure that the parents are the first variant of each item
169 sorted_data = sortProducts(data)
170
171 barcode.writeCSV(sorted_data, "item", column_names, folder, filename)
185 172
186 173 def itemPropertyUpload(flatfile, folder, filename): def itemPropertyUpload(flatfile, folder, filename):
187 174
 
... ... def itemPropertyUpload(flatfile, folder, filename):
224 211
225 212 for row in reader: for row in reader:
226 213 if row['parent_child'] == 'parent': if row['parent_child'] == 'parent':
227 try:
228 use_names =\
229 [i for i in property_names if i in list(row.keys())]
230 values = [row[i] for i in use_names]
231 except ValueError as err:
232 error.warnPrint("No Value",
233 sys.exc_info()[2].tb_lineno, err)
214 use_names =\
215 [i for i in property_names if i in list(row.keys())]
216 values = [row[i] for i in use_names]
234 217
235 218 # Check for empty values # Check for empty values
236 219 properties[row['item_sku']] = dict(zip(use_names, values)) properties[row['item_sku']] = dict(zip(use_names, values))
 
... ... def itemPropertyUpload(flatfile, folder, filename):
245 228
246 229 data[row + prop] = dict(zip(column_names, values)) data[row + prop] = dict(zip(column_names, values))
247 230 except KeyError as kerr: except KeyError as kerr:
248 error.errorPrint("Key was not found in the flatfile", kerr,
249 sys.exc_info()[2].tb_lineno)
231 error.errorPrint(
232 msg="Key was not found in the flatfile", err=kerr,
233 linenumber=sys.exc_info()[2].tb_lineno)
250 234
251 235
252 236 barcode.writeCSV(data, "Item_Merkmale", column_names, folder, filename) barcode.writeCSV(data, "Item_Merkmale", column_names, folder, filename)
 
... ... def getProperties(flatfile):
292 276 except KeyError as err: except KeyError as err:
293 277 msg = str(f"getProperties key: {err} not found") msg = str(f"getProperties key: {err} not found")
294 278 error.errorPrint(msg=msg, err='', error.errorPrint(msg=msg, err='',
295 linenumber=sys.exc_info()[2].tb_lineno)
279 linenumber=sys.exc_info()[2].tb_lineno)
296 280 except Exception as err: except Exception as err:
297 281 error.errorPrint( error.errorPrint(
298 282 msg="getProperties setting values failed", err=err, msg="getProperties setting values failed", err=err,
 
... ... def getAttributes(dataset, sets):
324 308 print("{0} not found in {1}" print("{0} not found in {1}"
325 309 .format(dataset['parent_sku'], ','.join(list(sets.keys())))) .format(dataset['parent_sku'], ','.join(list(sets.keys()))))
326 310 except Exception as err: except Exception as err:
327 error.errorPrint("Adding of color attribute failed", err,
328 sys.exc_info()[2].tb_lineno)
311 error.errorPrint(msg="Adding color attribute failed", err=err,
312 linenumber=sys.exc_info()[2].tb_lineno)
329 313 try: try:
330 314 if len(sets[dataset['parent_sku']]['size_name']) > 1: if len(sets[dataset['parent_sku']]['size_name']) > 1:
331 315 if not output_string: if not output_string:
 
... ... def getAttributes(dataset, sets):
333 317 else: else:
334 318 output_string = output_string + ';size_name:' + dataset['size_name'] output_string = output_string + ';size_name:' + dataset['size_name']
335 319 except Exception as err: except Exception as err:
336 error.errorPrint("Adding of size attribute failed", err,
337 sys.exc_info()[2].tb_lineno)
320 error.errorPrint(msg="Adding size attribute failed", err=err,
321 linenumber=sys.exc_info()[2].tb_lineno)
338 322 return output_string return output_string
339 323
340 324 def findSimilarAttr(flatfile, attribute): def findSimilarAttr(flatfile, attribute):
 
... ... def checkFlatfile(flatfile):
405 389
406 390 first_row = [*list(reader)[0]] first_row = [*list(reader)[0]]
407 391 if len(first_row) == 1: if len(first_row) == 1:
408 error.errorPrint("Wrong delimiter, use ';'",
409 'False delimiter detected',
410 inspect.currentframe().f_back.f_lineno)
392 error.errorPrint(
393 msg='False delimiter detected',
394 err="Wrong delimiter, use ';'",
395 linenumber=inspect.currentframe().f_back.f_lineno)
411 396 return False return False
412 397
413 398 if not 'feed_product_type' in first_row: if not 'feed_product_type' in first_row:
414 399 if 'Marke' in first_row: if 'Marke' in first_row:
415 error.errorPrint("Only use the last of the 3 header lines",
416 err='',
417 linenumber=inspect.currentframe()
418 .f_back.f_lineno)
400 error.errorPrint(
401 msg="Only use the last of the 3 header lines", err='',
402 linenumber=inspect.currentframe().f_back.f_lineno)
419 403 print("Please cut the first two rows from the flatfile for this script\n") print("Please cut the first two rows from the flatfile for this script\n")
420 404 return False return False
421 error.errorPrint("Wrong header line", err='',
422 linenumber=inspect.currentframe().f_back.f_lineno)
405 error.errorPrint(
406 msg="Wrong header line", err='',
407 linenumber=inspect.currentframe().f_back.f_lineno)
423 408 return False return False
424 409 return True return True
425 410
426 411 except Exception as err: except Exception as err:
427 error.warnPrint("Flatfile check failed",
428 sys.exc_info()[2].tb_lineno, err)
412 error.warnPrint(msg="Flatfile check failed", err=err,
413 linenumber=sys.exc_info()[2].tb_lineno)
429 414
430 415 def checkEncoding(file_dict): def checkEncoding(file_dict):
431 416 try: try:
 
... ... def checkEncoding(file_dict):
434 419 raw_data = item.read() raw_data = item.read()
435 420 except Exception as err: except Exception as err:
436 421 print("ERROR: {0}\n".format(err)) print("ERROR: {0}\n".format(err))
437 error.errorPrint("check Encoding reading failed", err,
438 sys.exc_info()[2].tb_lineno)
422 error.errorPrint(msg="check Encoding reading failed", err=err,
423 linenumber=sys.exc_info()[2].tb_lineno)
439 424 file_dict['encoding'] = chardet.detect(raw_data)['encoding'] file_dict['encoding'] = chardet.detect(raw_data)['encoding']
440 425 print("chardet data for {0}\n{1}\n".format(file_dict['path'], chardet.detect(raw_data))) print("chardet data for {0}\n{1}\n".format(file_dict['path'], chardet.detect(raw_data)))
441 426
442 427 except Exception as err: except Exception as err:
443 error.errorPrint("check Encoding failed", err,
444 sys.exc_info()[2].tb_lineno)
428 error.errorPrint(msg="check Encoding failed", err=err,
429 linenumber=sys.exc_info()[2].tb_lineno)
445 430
446 431 return file_dict return file_dict
447 432
 
... ... def getVariationId(exportfile, sku):
457 442 if row['VariationNo'] == sku: if row['VariationNo'] == sku:
458 443 variationid = row['VariationId'] variationid = row['VariationId']
459 444 continue continue
460 try:
461 if row[list(row.keys())[1]] == sku:
462 for i in range(len(list(row.keys()))):
463 # matches .id .ID _ID _id ID id
464 if re.search(r'\bid', [*row][i].lower()):
465 print("found ID in {0} value: {1}"
466 .format(list(row.keys())[i],
467 row[list(row.keys())[i]]))
468 variationid = row[list(row.keys())[i]]
469 except Exception as err:
470 error.errorPrint("Looking for irregularities in getVariationId",
471 err, sys.exc_info()[2].tb_lineno)
472 if os.name == 'nt':
473 print("press ENTER to continue...")
474 input()
475 exit(1)
445 if row[list(row.keys())[1]] == sku:
446 for i in range(len(list(row.keys()))):
447 # matches .id .ID _ID _id ID id
448 if re.search(r'\bid', [*row][i].lower()):
449 print("found ID in {0} value: {1}"
450 .format(row.keys()[i], row[row.keys()[i]]))
451 variationid = row[list(row.keys())[i]]
476 452 if not variationid: if not variationid:
477 error.warnPrint(msg="No Variation ID found for "+sku,
478 linenumber=inspect.currentframe().f_back.f_lineno)
453 error.warnPrint(
454 msg=str(f"No Variation ID found for {sku}"), err='',
455 linenumber=inspect.currentframe().f_back.f_lineno)
479 456
480 457 return variationid return variationid
Hints:
Before first commit, do not forget to setup your git environment:
git config --global user.name "your_name_here"
git config --global user.email "your@email_here"

Clone this repository using HTTP(S):
git clone https://rocketgit.com/user/initBasti/Amazon2PlentySync

Clone this repository using ssh (do not forget to upload a key first):
git clone ssh://rocketgit@ssh.rocketgit.com/user/initBasti/Amazon2PlentySync

Clone this repository using git:
git clone git://git.rocketgit.com/user/initBasti/Amazon2PlentySync

You are allowed to anonymously push to this repository.
This means that your pushed commits will automatically be transformed into a merge request:
... clone the repository ...
... make some changes and some commits ...
git push origin main