initBasti / Amazon2PlentySync (public) (License: GPLv3) (since 2019-01-27) (hash sha1)
Transfer your data from your Amazon Flatfile spreadsheet to the Plentymarkets system. A how-to is included in the README.
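For orientation, the importer reads the Amazon flatfile as a semicolon-delimited CSV and works off columns such as item_sku and parent_child (both visible in the diff further down). A minimal reading sketch, assuming a hypothetical file path and UTF-8 encoding (the tool itself detects the encoding with chardet):

import csv

# Hypothetical path; the tool takes the real path and a detected encoding
# from its configuration/GUI rather than hard-coding them.
FLATFILE_PATH = "amazon_flatfile.csv"

with open(FLATFILE_PATH, mode="r", encoding="utf-8") as item:
    # One row per SKU, semicolon-delimited, the same way item_upload.py reads it.
    reader = csv.DictReader(item, delimiter=";")
    for row in reader:
        print(row["item_sku"], row["parent_child"])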
List of commits:
Subject Hash Author Date (UTC)
Improve coding style and linting d3d1d6de52f0f5530ec200aae082ac0ec99ddd70 Sebastian Fricke 2020-04-29 10:08:21
Remove fixed values for specific item settings 0f2452a1bb953efcbb797c5feeb2b8f35617f537 Sebastian Fricke 2020-04-29 08:38:52
Remove fixed fba values from upload 758ca263cf88343c55af70b1e7824c76d485d7f2 Sebastian Fricke 2020-04-29 08:32:09
Remove market assign & availibilty settings fdf971d45706f8c75d9a7ea7e949193c45620051 Sebastian Fricke 2020-04-29 07:31:36
Remove manual upload of extern id & export files 66d60bc9243173f256b01071f203849edf43546d Sebastian Fricke 2020-04-28 13:55:27
Bug Fix: product-import.py Empty TK root window 0b087f8a7902523b2495c936a6e09447a93a6cb5 Sebastian Fricke 2020-04-28 09:41:04
Add ignore file for silver searcher tool 87a1b335af1f88c10ad12e5b8f59f8207c58aaec Sebastian Fricke 2020-04-28 09:25:54
add tests folder pycache to gitignore 9d7989d1658d3dffd7eb00e9411af3dec6c85563 Sebastian Fricke 2020-04-27 12:55:24
Unit test for find_price function c19fa28ad9bf0dd1b5361e871dc498f6704cb196 Sebastian Fricke 2020-04-27 12:53:20
Remove price calculation from script d852cb3ef648336ed94daeaaa220721e6055dd7c Sebastian Fricke 2020-04-27 12:50:52
Function description priceUpload 2c5735f4f4c79607f55c77b1359aa5984d1e6699 Sebastian Fricke 2020-04-23 09:34:30
similar attribute search regardless of the parent 4bb0970408d78d25264d479428fe8c3389483215 Sebastian Fricke 2020-04-23 09:28:19
Fix marking dropdown bug 0d1695083d2b4e49fd364a36a7ef3c92a192d62f Sebastian Fricke 2020-04-23 09:26:46
update desktop specific build script for windows 8a6536d6173f7383022fab92f234ab25fc81204b Sebastian Fricke 2020-04-22 10:15:05
Refactor config handling a9be950a4e8d97fa97c7e9caff33fcbb1f476d9d Sebastian Fricke 2020-04-22 10:11:57
Fix gui/category_chooser: file change bug 9879e65c9aad9b1feb05b6121a0e33c129a8beb5 Sebastian Fricke 2020-04-22 10:09:36
update .gitignore 6c7628af605a72ced1e146c27da8639225ab9c6c Sebastian Fricke 2020-04-22 10:08:47
Fix error.py: Fix colorful messages on windows 31f0b106c7aee1962bba3efb5758f647170eceff Sebastian Fricke 2020-04-22 10:07:30
Enhanced error output to extra module dda37a22d21db1af6330fd62395682b91f46f5ec Sebastian Fricke 2020-01-27 10:56:00
Introduction infoPrint, removed unnecessary parameter 98b6779f95fcf6d3350f64e7d2a8151932415458 Sebastian Fricke 2020-01-16 14:27:15
Commit d3d1d6de52f0f5530ec200aae082ac0ec99ddd70 - Improve coding style and linting
remove broad Exceptions, improve coding style
Author: Sebastian Fricke
Author date (UTC): 2020-04-29 10:08
Committer name: Sebastian Fricke
Committer date (UTC): 2020-04-29 10:08
Parent(s): 3b0227217ff9cf323fc0e91612f8f9d583d1098c
Signing key:
Tree: d66f5a14d55edfb550f45a2ee9079c6c9933ccb8
File Lines added Lines deleted
packages/item_upload.py 142 164
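The commit's theme, "remove broad Exceptions", shows up below as dropping the outer try/except that wrapped the whole read loop and keeping only narrow handlers that call the error helpers with explicit keyword arguments. A stripped-down sketch of the before/after pattern, assuming a stand-in for packages/error.errorPrint and a hypothetical process() step:

import sys

def error_print(msg, err, linenumber):
    # Stand-in for packages/error.errorPrint; the real helper formats and colours the output.
    print(f"ERROR line {linenumber}: {msg} ({err})")

def process(row):
    # Hypothetical per-row work; only the SKU lookup matters for the example.
    return row["item_sku"]

def load_rows_before(rows):
    # Before the refactor: one broad handler around the whole loop hides where things fail.
    try:
        for row in rows:
            process(row)
    except Exception as err:
        error_print("Reading file failed", err, sys.exc_info()[2].tb_lineno)

def load_rows_after(rows):
    # After the refactor: only the risky lookup is guarded, the expected error type
    # is named, and the helper is called with keyword arguments.
    for row in rows:
        try:
            process(row)
        except KeyError as kerr:
            error_print(msg="column name not found in flatfile", err=kerr,
                        linenumber=sys.exc_info()[2].tb_lineno)
            raise

load_rows_after([{"item_sku": "SKU-1"}])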
File packages/item_upload.py changed (mode: 100644) (index b416e3d..be45975)
1 1 import csv
2 2 import sys
3 import re
4 3 import collections
5 4 import inspect
6 import chardet
7 5 import os
6 import chardet
8 7 import pandas
9 8 import xlrd
10 9 from packages import barcode, amazon, price, error
 
... ... def itemUpload(flatfile, intern, stocklist, folder, input_data, filename):
48 47 package_properties = getProperties(flatfile)
49 48 group_parent = ''
50 49
51 try:
52 with open(flatfile['path'], mode='r', encoding=flatfile['encoding']) as item:
53 reader = csv.DictReader(item, delimiter=";")
50 with open(flatfile['path'], mode='r', encoding=flatfile['encoding']) as item:
51 reader = csv.DictReader(item, delimiter=";")
54 52
55 for row in reader:
53 for row in reader:
54 keywords = ''
55 if row['generic_keywords']:
56 keywords = row['generic_keywords']
57
58 if not keywords:
56 59 try:
57 keywords = ''
58 if row['generic_keywords']:
59 keywords = row['generic_keywords']
60
61 if not keywords:
62 try:
63 raise barcode.EmptyFieldWarning('generic_keywords')
64 except Exception:
65 error.warnPrint("Generic Keywords are empty!",
66 inspect.currentframe().f_back.f_lineno)
67
68 item_price = row['standard_price']
69 if not item_price and row['parent_child'] == 'parent':
70 item_price=price.find_price(flatfile, row['item_sku'])
71 if item_price == -1:
72 if os.name == 'nt':
73 print("press ENTER to continue..")
74 input()
75 exit(1)
76 if item_price == '':
77 error.warnPrint(
78 msg=str(f"{row['item_sku']}, has no price"), err='',
79 linenumber=inspect.currentframe().f_back.f_lineno)
80
81
82 try:
83 attributes = ''
84 if row['parent_child'] == 'parent':
85 is_parent = True
86 group_parent = row['item_sku']
87 position = 0
88 if row['parent_child'] == 'child':
89 is_parent = False
90 attributes = getAttributes(dataset=row,
91 sets=color_size_sets)
92 if(group_parent and row['parent_sku'] == group_parent):
93 position += 1
94 except Exception as err:
95 error.warnPrint("Attribute setting failed",
96 sys.exc_info()[2].tb_lineno, err)
97 try:
98 values = [
99 row['parent_sku'], row['item_sku'],
100 is_parent,
101 package_properties['length'] * 10,
102 package_properties['width'] * 10,
103 package_properties['height'] * 10,
104 package_properties['weight'],
105 row['item_name'], '104',
106 attributes, position,
107 keywords,
108 input_data['name'], row['product_description'],
109 '', # externalID
110 input_data['categories'],
111 input_data['categories'][0:3],
112 input_data['categories'][0:3],
113 input_data['categories'],
114 input_data['categories'][0:3], input_data['categories'][0:3],
115 '', '', # barcode
116 '', '', # market & accout id amazonsku
117 '', '', # sku & parentsku amazonsku
118 amazon.get_producttype_id(source=flatfile,
119 sku=row['item_sku']),
120 item_price, # prices
121 '', '', '', #asin
122 input_data['marking']
123 ]
124
125 except KeyError as kerr:
126 error.warnPrint('itemUpload: key not found in flatfile',
127 inspect.currentframe().f_back.f_lineno,
128 err=kerr)
129 raise KeyError
130 except Exception as err:
131 error.errorPrint("itemUpload: setting values failed", err,
132 sys.exc_info()[2].tb_lineno)
133
134 data[row['item_sku']] =\
135 collections.OrderedDict(zip(column_names, values))
136 except KeyError as err:
137 error.errorPrint("Reading file failed", err,
138 sys.exc_info()[2].tb_lineno)
139 return row['item_sku']
60 raise barcode.EmptyFieldWarning('generic_keywords')
61 except Exception:
62 error.warnPrint(
63 msg="Generic Keywords are empty!", err='',
64 linenumber=inspect.currentframe().f_back.f_lineno)
65
66 item_price = row['standard_price']
67 if not item_price and row['parent_child'] == 'parent':
68 item_price = price.find_price(flatfile, row['item_sku'])
69 if item_price == -1:
70 if os.name == 'nt':
71 print("press ENTER to continue..")
72 input()
73 sys.exit(1)
74 if item_price == '':
75 error.warnPrint(
76 msg=str(f"{row['item_sku']},has no price"), err='',
77 linenumber=inspect.currentframe().f_back.f_lineno)
78
79
80 try:
81 attributes = ''
82 if row['parent_child'] == 'parent':
83 is_parent = True
84 group_parent = row['item_sku']
85 position = 0
86 if row['parent_child'] == 'child':
87 is_parent = False
88 attributes = getAttributes(dataset=row,
89 sets=color_size_sets)
90 if(group_parent and row['parent_sku'] == group_parent):
91 position += 1
92 except Exception as err:
93 error.warnPrint(msg="Attribute setting failed", err=err,
94 linenumber=sys.exc_info()[2].tb_lineno)
95 try:
96 values = [
97 row['parent_sku'], row['item_sku'],
98 is_parent,
99 package_properties['length'] * 10,
100 package_properties['width'] * 10,
101 package_properties['height'] * 10,
102 package_properties['weight'],
103 row['item_name'], '104',
104 attributes, position,
105 keywords,
106 input_data['name'], row['product_description'],
107 '', # externalID
108 input_data['categories'],
109 input_data['categories'][0:3],
110 input_data['categories'][0:3],
111 input_data['categories'],
112 input_data['categories'][0:3], input_data['categories'][0:3],
113 '', '', # barcode
114 '', '', # market & accout id amazonsku
115 '', '', # sku & parentsku amazonsku
116 amazon.get_producttype_id(source=flatfile,
117 sku=row['item_sku']),
118 item_price, # prices
119 '', '', '', #asin
120 input_data['marking']
121 ]
122
123 except KeyError as kerr:
124 error.warnPrint(
125 msg='column name not found in flatfile',
126 err=kerr,
127 linenumber=inspect.currentframe().f_back.f_lineno)
128 raise KeyError
129 except Exception as err:
130 error.errorPrint(msg="setting values failed", err=err,
131 linenumber=sys.exc_info()[2].tb_lineno)
132
133 data[row['item_sku']] =\
134 collections.OrderedDict(zip(column_names, values))
140 135
141 136 # open the intern number xlsx to get the external id
142 137 get_externalid(dataset=data, numberlist=intern)
 
... ... def itemUpload(flatfile, intern, stocklist, folder, input_data, filename):
152 147 data[row]['ASIN-type'] = barcode_data[row]['ASIN-type']
153 148 data[row]['ASIN-value'] = barcode_data[row]['ASIN-value']
154 149 except Exception as err:
155 error.errorPrint("Barcode part for "+row, err,
156 sys.exc_info()[2].tb_lineno)
150 error.errorPrint(
151 msg=str(f"Barcode part for {row}"), err=err,
152 linenumber=sys.exc_info()[2].tb_lineno)
157 153
158 154 # Include the amazonsku
159 155 sku_data = amazon.amazonSkuUpload(flatfile)
 
... ... def itemUpload(flatfile, intern, stocklist, folder, input_data, filename):
166 162 data[row]['amazon_sku'] = sku_data[row]['SKU']
167 163 data[row]['amazon_parentsku'] = sku_data[row]['ParentSKU']
168 164 except Exception as err:
169 error.errorPrint("SKU part for "+row, err,
170 sys.exc_info()[2].tb_lineno)
165 error.errorPrint(
166 msg=str(f"SKU part for {row}"), err=err,
167 linenumber=sys.exc_info()[2].tb_lineno)
171 168
172 169 # Sort the dictionary to make sure that the parents are the first variant of each item
173 170 sorted_data = sortProducts(data)
174 171
175 172 barcode.writeCSV(sorted_data, "item", column_names, folder, filename)
176 except UnicodeDecodeError as err:
177 error.errorPrint("decoding problem", err,
178 sys.exc_info()[2].tb_lineno)
179 if os.name == 'nt':
180 print("press ENTER to continue..")
181 input()
182 exit(1)
183 173
184 174 def itemPropertyUpload(flatfile, folder, filename):
185 175
 
... ... def itemPropertyUpload(flatfile, folder, filename):
222 212
223 213 for row in reader:
224 214 if row['parent_child'] == 'parent':
225 try:
226 use_names =\
227 [i for i in property_names if i in list(row.keys())]
228 values = [row[i] for i in use_names]
229 except ValueError as err:
230 error.warnPrint("No Value",
231 sys.exc_info()[2].tb_lineno, err)
215 use_names =\
216 [i for i in property_names if i in list(row.keys())]
217 values = [row[i] for i in use_names]
232 218
233 219 # Check for empty values
234 220 properties[row['item_sku']] = dict(zip(use_names, values))
 
... ... def itemPropertyUpload(flatfile, folder, filename):
243 229
244 230 data[row + prop] = dict(zip(column_names, values))
245 231 except KeyError as kerr:
246 error.errorPrint("Key was not found in the flatfile", kerr,
247 sys.exc_info()[2].tb_lineno)
232 error.errorPrint(
233 msg="Key was not found in the flatfile", err=kerr,
234 linenumber=sys.exc_info()[2].tb_lineno)
248 235
249 236
250 237 barcode.writeCSV(data, "Item_Merkmale", column_names, folder, filename)
 
... ... def getProperties(flatfile):
290 277 except KeyError as err:
291 278 msg = str(f"getProperties key: {err} not found")
292 279 error.errorPrint(msg=msg, err='',
293 linenumber=sys.exc_info()[2].tb_lineno)
294 except Exception as err:
295 error.errorPrint(
296 msg="getProperties setting values failed", err=err,
297 linenumber=sys.exc_info()[2].tb_lineno)
280 linenumber=sys.exc_info()[2].tb_lineno)
298 281
299 282 return properties
300 283
 
... ... def getAttributes(dataset, sets):
322 305 print("{0} not found in {1}"
323 306 .format(dataset['parent_sku'], ','.join(list(sets.keys()))))
324 307 except Exception as err:
325 error.errorPrint("Adding of color attribute failed", err,
326 sys.exc_info()[2].tb_lineno)
308 error.errorPrint(
309 msg="Adding color attribute failed", err=err,
310 linenumber=sys.exc_info()[2].tb_lineno)
327 311 try:
328 312 if len(sets[dataset['parent_sku']]['size_name']) > 1:
329 313 if not output_string:
 
... ... def getAttributes(dataset, sets):
331 315 else:
332 316 output_string = output_string + ';size_name:' + dataset['size_name']
333 317 except Exception as err:
334 error.errorPrint("Adding of size attribute failed", err,
335 sys.exc_info()[2].tb_lineno)
318 error.errorPrint(
319 msg="Adding of size attribute failed", err=err,
320 linenumber=sys.exc_info()[2].tb_lineno)
336 321 return output_string
337 322
338 323 def findSimilarAttr(flatfile, attribute):
 
... ... def searchChild(item_list, parent):
397 382 return child_dict
398 383
399 384 def checkFlatfile(flatfile):
400 try:
401 with open(flatfile['path'], mode='r', encoding=flatfile['encoding']) as item:
402 reader = csv.DictReader(item, delimiter=';')
403
404 first_row = [*list(reader)[0]]
405 if len(first_row) == 1:
406 error.errorPrint("Wrong delimiter, use ';'",
407 'False delimiter detected',
408 inspect.currentframe().f_back.f_lineno)
409 return False
410
411 if not 'feed_product_type' in first_row:
412 if 'Marke' in first_row:
413 error.errorPrint("Only use the last of the 3 header lines",
414 err='',
415 linenumber=inspect.currentframe()
416 .f_back.f_lineno)
417 print("Please cut the first two rows from the flatfile for this script\n")
418 return False
419 error.errorPrint("Wrong header line", err='',
420 linenumber=inspect.currentframe().f_back.f_lineno)
385 with open(flatfile['path'], mode='r', encoding=flatfile['encoding']) as item:
386 reader = csv.DictReader(item, delimiter=';')
387
388 first_row = [*list(reader)[0]]
389 if len(first_row) == 1:
390 error.errorPrint(
391 msg='False delimiter detected',
392 err="Wrong delimiter, use ';'",
393 linenumber=inspect.currentframe().f_back.f_lineno)
394 return False
395
396 if not 'feed_product_type' in first_row:
397 if 'Marke' in first_row:
398 error.errorPrint(
399 msg="Remove the first 2 rows of the amazon flatfile",
400 err='',
401 linenumber=inspect.currentframe().f_back.f_lineno)
421 402 return False
422 return True
423
424 except Exception as err:
425 error.warnPrint("Flatfile check failed",
426 sys.exc_info()[2].tb_lineno, err)
403 error.errorPrint(
404 msg="Wrong header line", err='',
405 linenumber=inspect.currentframe().f_back.f_lineno)
406 return False
407 return True
427 408
428 409 def checkEncoding(file_dict):
429 try:
430 with open(file_dict['path'], mode='rb') as item:
431 try:
432 raw_data = item.read()
433 except Exception as err:
434 print("ERROR: {0}\n".format(err))
435 error.errorPrint("check Encoding reading failed", err,
436 sys.exc_info()[2].tb_lineno)
437 file_dict['encoding'] = chardet.detect(raw_data)['encoding']
438 print("chardet data for {0}\n{1}\n".format(file_dict['path'], chardet.detect(raw_data)))
439
440 except Exception as err:
441 error.errorPrint("check Encoding failed", err,
442 sys.exc_info()[2].tb_lineno)
410 with open(file_dict['path'], mode='rb') as item:
411 try:
412 raw_data = item.read()
413 except Exception as err:
414 print("ERROR: {0}\n".format(err))
415 error.errorPrint(
416 msg="check Encoding reading failed", err=err,
417 linenumber=sys.exc_info()[2].tb_lineno)
418 file_dict['encoding'] = chardet.detect(raw_data)['encoding']
419 print("chardet data for {0}\n{1}\n"
420 .format(file_dict['path'], chardet.detect(raw_data)))
443 421
444 422 return file_dict
445 423
 
... ... def get_variation_id(exportfile, sku):
461 439 exp = pandas.read_csv(exportfile,
462 440 sep=';')
463 441
464 if not len(exp.index):
442 if len(exp.index) == 0:
465 443 error.warnPrint(
466 444 msg='exp is empty, skip variation ID', err='',
467 445 linenumber=inspect.currentframe().f_back.f_lineno)
468 446 return 0
469 447
470 if(not len(exp.columns[exp.columns.str.contains(pat='Variation.id')]) or
471 not len(exp.columns[exp.columns.str.contains(pat='Variation.number')])):
448 if(len(exp.columns[exp.columns.str.contains(pat='Variation.id')]) == 0 or
449 len(exp.columns[exp.columns.str.contains(pat='Variation.number')]) == 0):
472 450 error.warnPrint(
473 451 msg="Exportfile requires fields 'Variation.id'&'Variation.number'",
474 452 err='', linenumber=inspect.currentframe().f_back.f_lineno)
475 453 return 0
476 454
477 455 variation = exp[exp['Variation.number'] == sku]
478 if not len(variation.index):
456 if len(variation.index) == 0:
479 457 error.warnPrint(
480 458 msg=str(f"{sku} not found in Plentymarkets export"),
481 459 err='', linenumber=inspect.currentframe().f_back.f_lineno)
 
... ... def get_externalid(dataset, numberlist):
505 483 extern_id = pandas.read_excel(numberlist['path'])
506 484 except xlrd.biffh.XLRDError as err:
507 485 error.errorPrint(
508 msg=str(f"..{intern['path'][-30:]} requires type [.xlsx]"),
486 msg=str(f"..{numberlist['path'][-30:]} requires type [.xlsx]"),
509 487 err=err, linenumber=sys.exc_info()[2].tb_lineno)
510 488 if os.name == 'nt':
511 489 print("press ENTER to continue..")
512 490 input()
513 exit(1)
491 sys.exit(1)
514 492
515 493 if extern_id.empty:
516 494 error.warnPrint(
Hints:
Before your first commit, do not forget to set up your git environment:
git config --global user.name "your_name_here"
git config --global user.email "your@email_here"

Clone this repository using HTTP(S):
git clone https://rocketgit.com/user/initBasti/Amazon2PlentySync

Clone this repository using ssh (do not forget to upload a key first):
git clone ssh://rocketgit@ssh.rocketgit.com/user/initBasti/Amazon2PlentySync

Clone this repository using git:
git clone git://git.rocketgit.com/user/initBasti/Amazon2PlentySync

You are allowed to push to this repository anonymously.
Your pushed commits will automatically be turned into a merge request:
... clone the repository ...
... make some changes and some commits ...
git push origin main