File packages/item_upload.py changed (mode: 100644) (index a916f3e..048ea5a)
@@ -1,11 +1,10 @@
 import csv
 import sys
 import re
-import chardet
 import collections
-import colorama
 import inspect
-from sys import exit
+import chardet
+import colorama
 from packages import barcode, amazon_data_upload, price_upload
 
 
@@ -14,29 +13,46 @@ class WrongEncodingException(Exception):
 
 try:
     def errorPrint(msg, err, linenumber):
-        print(colorama.Fore.RED)
-        print("ERROR:\nline:{0}\t{1}\tError:{2}".format(linenumber, msg, err))
+        print(colorama.Fore.LIGHTRED_EX)
+        if err:
+            print("ERROR:\nline:{0}\t{1}\tError:{2}"
+                  .format(linenumber, msg, err))
+        else:
+            print("ERROR:\nline:{0}\t{1}"
+                  .format(linenumber, msg))
         print(colorama.Style.RESET_ALL)
 except AttributeError:
     def errorPrint(msg, err, linenumber):
-        print("ERROR:\nline:{0}\t{1}\tError:{2}".format(linenumber, msg, err))
+        if err:
+            print("ERROR:\nline:{0}\t{1}\tError:{2}"
+                  .format(linenumber, msg, err))
+        else:
+            print("ERROR:\nline:{0}\t{1}"
+                  .format(linenumber, msg))
 
 try:
     def warnPrint(msg, err, linenumber):
         print(colorama.Fore.YELLOW)
-        print("WARNING:\nline:{0}\t{1}\tWarning:{2}"
-              .format(linenumber, msg, err))
+        if err:
+            print("WARNING:\nline:{0}\t{1}\tWarning:{2}"
+                  .format(linenumber, msg, err))
+        else:
+            print("WARNING:\nline:{0}\t{1}"
+                  .format(linenumber, msg))
         print(colorama.Style.RESET_ALL)
 except AttributeError:
-    def warnPrint(msg, err, linenumber):
-        print("WARNING:\nline:{0}\t{1}\tWarning:{2}"
-              .format(linenumber, msg, err))
+    def warnPrint(msg, linenumber, err=''):
+        if err:
+            print("WARNING:\nline:{0}\t{1}\tWarning:{2}"
+                  .format(linenumber, msg, err))
+        else:
+            print("WARNING:\nline:{0}\t{1}"
+                  .format(linenumber, msg))
 
-def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, filename):
-    # The column headers for the output file as expected from the
-    # plentymarkets dataformat
+
+def itemUpload(flatfile, intern, stocklist, folder, input_data, filename):
     column_names = ['Parent-SKU', 'SKU',
-                    'isParent',
+                    'is_parent',
                     'Length', 'Width',
                     'Height', 'Weight',
                     'Name', 'MainWarehouse',
@@ -61,14 +77,11 @@ def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f
                     'Item-Flag-1'
                    ]
 
-    # Unpack File and scrap data
-    # INPUT
-    # --------------------------------------------------------------
-    Data = dict()
-    sorted_Data = collections.OrderedDict()
+    data = dict()
+    sorted_data = collections.OrderedDict()
     package_properties = {}
     barcode_data = {}
-    isParent = False
+    is_parent = False
     position = 0
 
     # Get sets of all colors and sizes for each parent
@@ -76,83 +89,80 @@ def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f
     color_size_sets = {}
     color_size_sets = findSimilarAttr(flatfile)
 
-    # PACKAGE PROPERTIES
     package_properties = getProperties(flatfile)
     group_parent = ''
 
     try:
-        # FILL DICTIONARY
         with open(flatfile['path'], mode='r', encoding=flatfile['encoding']) as item:
             reader = csv.DictReader(item, delimiter=";")
 
             for row in reader:
-                # transform the text format to integer in order to adjust the
-                # height, width, length numbers from centimeter to milimeter
                 try:
-                    # SET KEYWORDS
                     keywords = ''
-                    if(row['generic_keywords']):
+                    if row['generic_keywords']:
                         keywords = row['generic_keywords']
 
-                    if(not(keywords)):
+                    if not keywords:
                         try:
                             raise barcode.EmptyFieldWarning('generic_keywords')
                         except Exception:
-                            print("Generic Keywords are empty!")
+                            warnPrint("Generic Keywords are empty!",
+                                      inspect.currentframe().f_back.f_lineno)
 
-                    # SET ATTRIBUTES
                     try:
                         attributes = ''
-                        if(row['parent_child'] == 'parent'):
-                            isParent = True
+                        if row['parent_child'] == 'parent':
+                            is_parent = True
                             group_parent = row['item_sku']
                             position = 0
-                        if(row['parent_child'] == 'child'):
-                            isParent = False
+                        if row['parent_child'] == 'child':
+                            is_parent = False
                             attributes = getAttributes(dataset=row,
-                                                       sets=color_size_sets)
+                                                       sets=color_size_sets)
                             if(group_parent and row['parent_sku'] == group_parent):
                                 position += 1
                     except Exception as err:
-                        warnPrint("Attribute setting failed", err,
-                                  sys.exc_info()[2].tb_lineno)
+                        warnPrint("Attribute setting failed",
                                  sys.exc_info()[2].tb_lineno, err)
                     try:
                         values = [
-                            row['parent_sku'], row['item_sku'],
-                            isParent,
-                            package_properties['length'] * 10,
-                            package_properties['width'] * 10,
-                            package_properties['height'] * 10,
-                            package_properties['weight'],
-                            row['item_name'], '104',
-                            attributes, position,
-                            '62', keywords,
-                            row['brand_name'].upper(), '3',
-                            input_data['name'], row['product_description'],
-                            '', # externalID
-                            '1', '1', # NetStock pos = Vis & neg = Invis
-                            '2', input_data['categories'],
-                            input_data['categories'][0:2], input_data['categories'][0:2],
-                            'Y', 'Y', # mandant
-                            '', '', # barcode
-                            'Y', 'Y', # marketconnection
-                            'Y', 'Y', # marketconnection
-                            'Y', # marketconnection
-                            '', '', # market & accout id amazonsku
-                            '', '', # sku & parentsku amazonsku
-                            '', '', '',# producttype & fba amazon
-                            '','','','','','',# prices
-                            '', '', '', #asin
-                            input_data['marking']
-                        ]
-
-                    except KeyError:
+                            row['parent_sku'], row['item_sku'],
+                            is_parent,
+                            package_properties['length'] * 10,
+                            package_properties['width'] * 10,
+                            package_properties['height'] * 10,
+                            package_properties['weight'],
+                            row['item_name'], '104',
+                            attributes, position,
+                            '62', keywords,
+                            row['brand_name'].upper(), '3',
+                            input_data['name'], row['product_description'],
+                            '', # externalID
+                            '1', '1', # NetStock pos = Vis & neg = Invis
+                            '2', input_data['categories'],
+                            input_data['categories'][0:3], input_data['categories'][0:3],
+                            'Y', 'Y', # mandant
+                            '', '', # barcode
+                            'Y', 'Y', # marketconnection
+                            'Y', 'Y', # marketconnection
+                            'Y', # marketconnection
+                            '', '', # market & accout id amazonsku
+                            '', '', # sku & parentsku amazonsku
+                            '', '', '', # producttype & fba amazon
+                            '', '', '', '', '', '', # prices
+                            '', '', '', #asin
+                            input_data['marking']
+                        ]
+
+                    except KeyError as kerr:
+                        warnPrint('itemUpload: key not found in flatfile',
+                                  inspect.currentframe().f_back.f_lineno,
+                                  err=kerr)
                         raise KeyError
-                        print('Error at the Values')
                     except Exception as err:
-                        print("Error @ setting values: line:{0}, err:{1}"
-                              .format(sys.exc_info()[2].tb_lineno, err))
-                    Data[row['item_sku']] = collections.OrderedDict(zip(column_names, values))
+                        errorPrint("itemUpload: setting values failed", err,
+                                   sys.exc_info()[2].tb_lineno)
+                    data[row['item_sku']] = collections.OrderedDict(zip(column_names, values))
                 except KeyError as err:
                     errorPrint("Reading file failed", err,
                                sys.exc_info()[2].tb_lineno)
@@ -163,23 +173,23 @@ def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f
             reader = csv.DictReader(item, delimiter=";")
             for row in reader:
                 try:
-                    if(row['amazon_sku'] in [*Data]):
-                        Data[row['amazon_sku']]['ExternalID'] = row['full_number']
+                    if row['amazon_sku'] in list(data.keys()):
+                        data[row['amazon_sku']]['ExternalID'] = row['full_number']
                 except KeyError as keyerr:
-                    print(keyerr)
-                    print("Keyerror at the Intern Number addition")
+                    warnPrint("key was not found in intern number list",
+                              sys.exc_info()[2].tb_lineno, keyerr)
 
         # Include the barcodes & asin
         barcode_data = barcode.barcode_Upload(flatfile, stocklist)
 
         for row in barcode_data:
            try:
-                if(row in [*Data]):
-                    Data[row]['EAN_Barcode'] = barcode_data[row]['EAN_Barcode']
-                    Data[row]['FNSKU_Barcode'] = barcode_data[row]['FNSKU_Barcode']
-                    Data[row]['ASIN-countrycode'] = barcode_data[row]['ASIN-countrycode']
-                    Data[row]['ASIN-type'] = barcode_data[row]['ASIN-type']
-                    Data[row]['ASIN-value'] = barcode_data[row]['ASIN-value']
+                if row in list(data.keys()):
+                    data[row]['EAN_Barcode'] = barcode_data[row]['EAN_Barcode']
+                    data[row]['FNSKU_Barcode'] = barcode_data[row]['FNSKU_Barcode']
+                    data[row]['ASIN-countrycode'] = barcode_data[row]['ASIN-countrycode']
+                    data[row]['ASIN-type'] = barcode_data[row]['ASIN-type']
+                    data[row]['ASIN-value'] = barcode_data[row]['ASIN-value']
             except Exception as err:
                 errorPrint("Barcode part for "+row, err,
                            sys.exc_info()[2].tb_lineno)
@@ -189,11 +199,11 @@ def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f
 
         for row in sku_data:
             try:
-                if(row in [*Data]):
-                    Data[row]['marketid'] = sku_data[row]['MarketID']
-                    Data[row]['accountid'] = sku_data[row]['MarketAccountID']
-                    Data[row]['amazon_sku'] = sku_data[row]['SKU']
-                    Data[row]['amazon_parentsku'] = sku_data[row]['ParentSKU']
+                if row in list(data.keys()):
+                    data[row]['marketid'] = sku_data[row]['MarketID']
+                    data[row]['accountid'] = sku_data[row]['MarketAccountID']
+                    data[row]['amazon_sku'] = sku_data[row]['SKU']
+                    data[row]['amazon_parentsku'] = sku_data[row]['ParentSKU']
             except Exception as err:
                 errorPrint("SKU part for "+row, err,
                            sys.exc_info()[2].tb_lineno)
@@ -203,12 +213,12 @@ def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f
 
         for row in ama_data:
             try:
-                if(row in [*Data]):
-                    Data[row]['amazon-producttype'] = ama_data[row]['ItemAmazonProductType']
-                    Data[row]['fba-enabled'] = ama_data[row]['ItemAmazonFBA']
-                    Data[row]['fba-shipping'] = ama_data[row]['ItemShippingWithAmazonFBA']
+                if row in list(data.keys()):
+                    data[row]['amazon-producttype'] = ama_data[row]['ItemAmazonProductType']
+                    data[row]['fba-enabled'] = ama_data[row]['ItemAmazonFBA']
+                    data[row]['fba-shipping'] = ama_data[row]['ItemShippingWithAmazonFBA']
             except Exception as err:
-                errorPrint("AmazonData part for "+row, err,
+                errorPrint("Amazondata part for "+row, err,
                            sys.exc_info()[2].tb_lineno)
 
         # Include the price
@@ -216,28 +226,23 @@ def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f
 
         for row in price_data:
             try:
-                if(row in [*Data]):
-                    Data[row]['price-price'] = price_data[row]['price']
-                    Data[row]['ebay-price'] = price_data[row]['ebay']
-                    Data[row]['amazon-price'] = price_data[row]['amazon']
-                    Data[row]['webshop-price'] = price_data[row]['webshop']
-                    Data[row]['etsy-price'] = price_data[row]['etsy']
+                if row in list(data.keys()):
+                    data[row]['price-price'] = price_data[row]['price']
+                    data[row]['ebay-price'] = price_data[row]['ebay']
+                    data[row]['amazon-price'] = price_data[row]['amazon']
+                    data[row]['webshop-price'] = price_data[row]['webshop']
+                    data[row]['etsy-price'] = price_data[row]['etsy']
             except Exception as err:
                 errorPrint("Price part for "+row, err,
                            sys.exc_info()[2].tb_lineno)
 
-        # Write Data into new CSV for Upload
-        # OUTPUT
-        # --------------------------------------------------------------
-
         # Sort the dictionary to make sure that the parents are the first variant of each item
-        sorted_Data = sortProducts(Data)
+        sorted_data = sortProducts(data)
 
-        barcode.writeCSV(sorted_Data, "item", column_names, folder, filename)
+        barcode.writeCSV(sorted_data, "item", column_names, folder, filename)
     except UnicodeDecodeError as err:
-        print("Decode Error at line: {0}, err: {1}".format(sys.exc_info()[2].tb_lineno, err))
         errorPrint("decoding problem", err,
-                   sys.exc_info()[2].tb_lineno)
+                   sys.exc_info()[2].tb_lineno)
         print("press ENTER to continue..")
         input()
         sys.exit()
@@ -277,37 +282,38 @@ def itemPropertyUpload(flatfile, folder, filename):
                      '29', '45',
                      '46', '47',
                      '48', '49']
-        property_id = dict( zip(property_names, id_values) )
+        property_id = dict(zip(property_names, id_values))
 
         properties = dict()
 
         for row in reader:
-            if(row['parent_child'] == 'parent'):
+            if row['parent_child'] == 'parent':
                 try:
-                    use_names = [i for i in property_names if i in [*row]]
+                    use_names =\
+                        [i for i in property_names if i in list(row.keys())]
                     values = [row[i] for i in use_names]
                 except ValueError as err:
-                    warnPrint("No Value for "+i, err,
-                              sys.exc_info()[2].tb_lineno)
+                    warnPrint("No Value",
                              sys.exc_info()[2].tb_lineno, err)
 
             # Check for empty values
             properties[row['item_sku']] = dict(zip(use_names, values))
 
     column_names = ['SKU', 'ID-property', 'Value', 'Lang', 'Active']
-    Data = {}
-    for index, row in enumerate( properties ):
+    data = {}
+    for row in properties:
        for prop in use_names:
            try:
                values = [row, property_id[prop],
                          properties[row][prop], 'DE', 1]
 
-                Data[row + prop] = dict(zip(column_names, values))
+                data[row + prop] = dict(zip(column_names, values))
            except KeyError as kerr:
                errorPrint("Key was not found in the flatfile", kerr,
-                           sys.exc_info()[2].tb_lineno)
+                           sys.exc_info()[2].tb_lineno)
 
 
-    barcode.writeCSV(Data, "Item_Merkmale", column_names, folder, filename)
+    barcode.writeCSV(data, "Item_Merkmale", column_names, folder, filename)
 
 def getProperties(flatfile):
 
@@ -323,34 +329,32 @@ def getProperties(flatfile):
         # Get the package properties from one of the childs or parent
         for row in reader:
             try:
-                if(row['package_height'] and
-                   row['package_length'] and
-                   row['package_width'] and
-                   row['package_weight'] and
-                   not(properties[ 'height' ])):
-
-                    properties[ 'height' ] = int(row['package_height'])
-                    properties[ 'length' ] = int(row['package_length'])
-                    properties[ 'width' ] = int(row['package_width'])
-                    properties[ 'weight' ] = int(row['package_weight'])
-                elif(properties[ 'height' ]):
-                    break
-
-                # if the number is a floating point number it has to be
-                # transformed into a float first befor the integer conversion
+                try:
+                    if(row['package_height'] and row['package_length'] and
+                       row['package_width'] and row['package_weight'] and
+                       not properties['height']):
+
+                        properties['height'] = int(row['package_height'])
+                        properties['length'] = int(row['package_length'])
+                        properties['width'] = int(row['package_width'])
+                        properties['weight'] = int(row['package_weight'])
+                    elif properties['height']:
+                        break
+
+                    # if the number is a floating point number it has to be
+                    # transformed into a float first befor the integer conversion
+                except ValueError as err:
+                    properties['height'] = int(float(row['package_height']))
+                    properties['length'] = int(float(row['package_length']))
+                    properties['width'] = int(float(row['package_width']))
+                    properties['weight'] = int(float(row['package_weight']))
             except ValueError as err:
-                properties[ 'height' ] = int(float(row['package_height']))
-                properties[ 'length' ] = int(float(row['package_length']))
-                properties[ 'width' ] = int(float(row['package_width']))
-                properties[ 'weight' ] = int(float(row['package_weight']))
+                errorPrint("Parent has no package measurements", err,
+                           sys.exc_info()[2].tb_lineno)
+                sys.exit()
             except Exception as err:
                 errorPrint("getProperties setting values failed", err,
-                           sys.exc_info()[2].tb_lineno)
-
-            except ValueError as err:
-                errorPrint("Parent has no package measurements", err,
-                           sys.exc_info()[2].tb_lineno)
-                exit()
+                           sys.exc_info()[2].tb_lineno)
 
     return properties
 
@@ -358,59 +362,57 @@ def getAttributes(dataset, sets):
 
     output_string = ''
     try:
-        if(dataset['parent_sku'] in [*sets]):
-            if(len(sets[dataset['parent_sku']]['color']) > 1):
+        if dataset['parent_sku'] in list(sets.keys()):
+            if len(sets[dataset['parent_sku']]['color']) > 1:
                 output_string = 'color_name:' + dataset['color_name']
         else:
-            print("{0} not found in {1}".format(
-                dataset['parent_sku'], ','.join([*sets])
-            ))
+            print("{0} not found in {1}"
+                  .format(dataset['parent_sku'], ','.join(list(sets.keys()))))
     except Exception as err:
         errorPrint("Adding of color attribute failed", err,
-                   sys.exc_info()[2].tb_lineno)
+                   sys.exc_info()[2].tb_lineno)
     try:
-        if(len(sets[dataset['parent_sku']]['size']) > 1):
-            if(not(output_string)):
+        if len(sets[dataset['parent_sku']]['size']) > 1:
+            if not output_string:
                 output_string = 'size_name:' + dataset['size_name']
             else:
                 output_string = output_string + ';size_name:' + dataset['size_name']
     except Exception as err:
         errorPrint("Adding of size attribute failed", err,
-                   sys.exc_info()[2].tb_lineno)
+                   sys.exc_info()[2].tb_lineno)
     return output_string
 
 def findSimilarAttr(flatfile):
 
-    Data = {}
+    data = {}
 
     with open(flatfile['path'], mode='r', encoding=flatfile['encoding']) as item:
         reader = csv.DictReader(item, delimiter=";")
 
         for row in reader:
            # If it is a parent create a new dictionary with 2 sets for color and size
-            if(row['parent_child'].lower() == 'parent'):
+            if row['parent_child'].lower() == 'parent':
                color = set()
                size = set()
-                Data[row['item_sku']] = {'color':color, 'size':size}
-            # If it is a child search through the Data dictionary for a match
-            if(row['parent_child'] == 'child'):
-                for line in Data:
-                    if(row['parent_sku'] == line):
-                        Data[row['parent_sku']]['color'].add(row['color_name'])
-                        Data[row['parent_sku']]['size'].add(row['size_name'])
-    return Data
+                data[row['item_sku']] = {'color':color, 'size':size}
+            # If it is a child search through the data dictionary for a match
+            if row['parent_child'] == 'child':
+                for line in data:
+                    if row['parent_sku'] == line:
+                        data[row['parent_sku']]['color'].add(row['color_name'])
+                        data[row['parent_sku']]['size'].add(row['size_name'])
+    return data
 
 def sortProducts(dataset):
     item_list = dataset.items()
     new_dict = collections.OrderedDict()
-    parent_dict = collections.OrderedDict()
+    # parent_dict = collections.OrderedDict()
     child_dict = collections.OrderedDict()
-    position_of_parent = 0
 
     # Go through the items of the dataset
     for item in item_list:
-        if(not(item[0] in [* new_dict ])):
-            if(item[1]['isParent']):
+        if not item[0] in list(new_dict.keys()):
+            if item[1]['is_parent']:
                # add the parent to the new dict
                new_dict[item[0]] = item[1]
                # get all the children and update the itemlist without them
@@ -425,7 +427,7 @@ def searchChild(item_list, parent):
     child_dict = collections.OrderedDict()
 
     for item in item_list:
-        if(item[1]['Parent-SKU'] == parent):
+        if item[1]['Parent-SKU'] == parent:
             child_dict[item[0]] = item[1]
 
     return child_dict
@@ -435,27 +437,29 @@ def checkFlatfile(flatfile):
         with open(flatfile['path'], mode='r', encoding=flatfile['encoding']) as item:
             reader = csv.DictReader(item, delimiter=';')
 
-            first_row = [* list(reader)[0] ]
-            if(len(first_row) == 1):
+            first_row = [*list(reader)[0]]
+            if len(first_row) == 1:
                 errorPrint("Wrong delimiter, use ';'",
                            'False delimiter detected',
-                           inspect.currentframe().f_back.f_lineno)
+                           inspect.currentframe().f_back.f_lineno)
                 return False
-            if(not( 'feed_product_type' in first_row )):
-                if( 'Marke' in first_row ):
-                    errorPrint("Only use the last of the 3 header lines", err,
-                               sys.exc_info()[2].tb_lineno)
+
+            if not 'feed_product_type' in first_row:
+                if 'Marke' in first_row:
+                    errorPrint("Only use the last of the 3 header lines",
+                               err='',
+                               linenumber=inspect.currentframe()
+                               .f_back.f_lineno)
                     print("Please cut the first two rows from the flatfile for this script\n")
                     return False
-                else:
-                    errorPrint("Wrong header line", err,
-                               sys.exc_info()[2].tb_lineno)
-                    return False
-            else:
-                return True
+                errorPrint("Wrong header line", err='',
+                           linenumber=inspect.currentframe().f_back.f_lineno)
+                return False
+            return True
+
     except Exception as err:
-        warnPrint("Flatfile check failed", err,
-                  sys.exc_info()[2].tb_lineno)
+        warnPrint("Flatfile check failed",
+                  sys.exc_info()[2].tb_lineno, err)
 
 def checkEncoding(file_dict):
     try:
@@ -465,43 +469,44 @@ def checkEncoding(file_dict):
         except Exception as err:
             print("ERROR: {0}\n".format(err))
             errorPrint("check Encoding reading failed", err,
-                       sys.exc_info()[2].tb_lineno)
+                       sys.exc_info()[2].tb_lineno)
         file_dict['encoding'] = chardet.detect(raw_data)['encoding']
         print("chardet data for {0}\n{1}\n".format(file_dict['path'], chardet.detect(raw_data)))
 
     except Exception as err:
         errorPrint("check Encoding failed", err,
-                   sys.exc_info()[2].tb_lineno)
+                   sys.exc_info()[2].tb_lineno)
 
     return file_dict
 
 def getVariationId(exportfile, sku):
 
     variationid = 0
-    with open(exportfile['path'], mode = 'r', encoding = exportfile['encoding']) as item:
-        reader = csv.DictReader(item, delimiter = ';')
+    with open(exportfile['path'], mode='r',
+              encoding=exportfile['encoding']) as item:
+        reader = csv.DictReader(item, delimiter=';')
 
         for row in reader:
-            if('VariationNo' in [*row]):
-                if(row['VariationNo'] == sku):
+            if 'VariationNo' in list(row.keys()):
+                if row['VariationNo'] == sku:
                     variationid = row['VariationId']
-            else:
-                try:
-                    if(row[ [*row][1] ] == sku):
-                        for i in range( len([*row] )):
-                            # matches .id .ID _ID _id ID id
-                            if(re.search(r'\bid', [*row][i].lower())):
-                                print("found ID in {0} value: {1}".format([*row][i], row[ [*row][i] ]))
-                                variationid = row[ [*row][i] ]
-                except Exception as err:
-                    errorPrint("Looking for irregularities in getVariationId",
-                               err,sys.exc_info()[2].tb_lineno)
-                    print("press ENTER to continue...")
-                    input()
-    if(not(variationid)):
-        print("No Variation ID found for {0}\n".format(sku))
-        warnPrint("No Variation ID found for "+sku,
-                  err,sys.exc_info()[2].tb_lineno)
+                continue
+            try:
+                if row[list(row.keys())[1]] == sku:
+                    for i in range(len(list(row.keys()))):
+                        # matches .id .ID _ID _id ID id
+                        if re.search(r'\bid', [*row][i].lower()):
+                            print("found ID in {0} value: {1}"
+                                  .format(list(row.keys())[i],
+                                          row[list(row.keys())[i]]))
+                            variationid = row[list(row.keys())[i]]
+            except Exception as err:
+                errorPrint("Looking for irregularities in getVariationId",
+                           err, sys.exc_info()[2].tb_lineno)
+                print("press ENTER to continue...")
+                input()
+    if not variationid:
+        warnPrint(msg="No Variation ID found for "+sku,
+                  linenumber=inspect.currentframe().f_back.f_lineno)
 
     return variationid
-
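
Note on the reworked print helpers: the main behavioural change in this diff is that warnPrint now takes the line number as its second argument and err as an optional keyword (err=''), so callers can warn without an exception object. A minimal usage sketch under that assumption follows; the fallback (non-colorama) definition is copied from the diff above, while the SKU and the calling context are made-up example values, and f_lineno is used directly because the sketch runs at module level where f_back would be None.

import inspect

# Fallback variant of the reworked helper from the diff above
# (the colorama branch only wraps the same output in color codes).
def warnPrint(msg, linenumber, err=''):
    if err:
        print("WARNING:\nline:{0}\t{1}\tWarning:{2}".format(linenumber, msg, err))
    else:
        print("WARNING:\nline:{0}\t{1}".format(linenumber, msg))

# Without an exception: no placeholder err argument is needed anymore.
warnPrint("No Variation ID found for ABC-123",  # hypothetical SKU
          linenumber=inspect.currentframe().f_lineno)

# With an exception: the error text is appended to the warning.
try:
    {}['generic_keywords']
except KeyError as err:
    warnPrint("Generic Keywords are empty!",
              inspect.currentframe().f_lineno, err)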