File packages/item_upload.py changed (mode: 100644) (index b416e3d..be45975) |
1 |
1 |
import csv |
import csv |
2 |
2 |
import sys |
import sys |
3 |
|
import re |
|
4 |
3 |
import collections |
import collections |
5 |
4 |
import inspect |
import inspect |
6 |
|
import chardet |
|
7 |
5 |
import os |
import os |
|
6 |
|
import chardet |
8 |
7 |
import pandas |
import pandas |
9 |
8 |
import xlrd |
import xlrd |
10 |
9 |
from packages import barcode, amazon, price, error |
from packages import barcode, amazon, price, error |
|
... |
... |
def itemUpload(flatfile, intern, stocklist, folder, input_data, filename): |
48 |
47 |
package_properties = getProperties(flatfile) |
package_properties = getProperties(flatfile) |
49 |
48 |
group_parent = '' |
group_parent = '' |
50 |
49 |
|
|
51 |
|
try: |
|
52 |
|
with open(flatfile['path'], mode='r', encoding=flatfile['encoding']) as item: |
|
53 |
|
reader = csv.DictReader(item, delimiter=";") |
|
|
50 |
|
with open(flatfile['path'], mode='r', encoding=flatfile['encoding']) as item: |
|
51 |
|
reader = csv.DictReader(item, delimiter=";") |
54 |
52 |
|
|
55 |
|
for row in reader: |
|
|
53 |
|
for row in reader: |
|
54 |
|
keywords = '' |
|
55 |
|
if row['generic_keywords']: |
|
56 |
|
keywords = row['generic_keywords'] |
|
57 |
|
|
|
58 |
|
if not keywords: |
56 |
59 |
try: |
try: |
57 |
|
keywords = '' |
|
58 |
|
if row['generic_keywords']: |
|
59 |
|
keywords = row['generic_keywords'] |
|
60 |
|
|
|
61 |
|
if not keywords: |
|
62 |
|
try: |
|
63 |
|
raise barcode.EmptyFieldWarning('generic_keywords') |
|
64 |
|
except Exception: |
|
65 |
|
error.warnPrint("Generic Keywords are empty!", |
|
66 |
|
inspect.currentframe().f_back.f_lineno) |
|
67 |
|
|
|
68 |
|
item_price = row['standard_price'] |
|
69 |
|
if not item_price and row['parent_child'] == 'parent': |
|
70 |
|
item_price=price.find_price(flatfile, row['item_sku']) |
|
71 |
|
if item_price == -1: |
|
72 |
|
if os.name == 'nt': |
|
73 |
|
print("press ENTER to continue..") |
|
74 |
|
input() |
|
75 |
|
exit(1) |
|
76 |
|
if item_price == '': |
|
77 |
|
error.warnPrint( |
|
78 |
|
msg=str(f"{row['item_sku']}, has no price"), err='', |
|
79 |
|
linenumber=inspect.currentframe().f_back.f_lineno) |
|
80 |
|
|
|
81 |
|
|
|
82 |
|
try: |
|
83 |
|
attributes = '' |
|
84 |
|
if row['parent_child'] == 'parent': |
|
85 |
|
is_parent = True |
|
86 |
|
group_parent = row['item_sku'] |
|
87 |
|
position = 0 |
|
88 |
|
if row['parent_child'] == 'child': |
|
89 |
|
is_parent = False |
|
90 |
|
attributes = getAttributes(dataset=row, |
|
91 |
|
sets=color_size_sets) |
|
92 |
|
if(group_parent and row['parent_sku'] == group_parent): |
|
93 |
|
position += 1 |
|
94 |
|
except Exception as err: |
|
95 |
|
error.warnPrint("Attribute setting failed", |
|
96 |
|
sys.exc_info()[2].tb_lineno, err) |
|
97 |
|
try: |
|
98 |
|
values = [ |
|
99 |
|
row['parent_sku'], row['item_sku'], |
|
100 |
|
is_parent, |
|
101 |
|
package_properties['length'] * 10, |
|
102 |
|
package_properties['width'] * 10, |
|
103 |
|
package_properties['height'] * 10, |
|
104 |
|
package_properties['weight'], |
|
105 |
|
row['item_name'], '104', |
|
106 |
|
attributes, position, |
|
107 |
|
keywords, |
|
108 |
|
input_data['name'], row['product_description'], |
|
109 |
|
'', # externalID |
|
110 |
|
input_data['categories'], |
|
111 |
|
input_data['categories'][0:3], |
|
112 |
|
input_data['categories'][0:3], |
|
113 |
|
input_data['categories'], |
|
114 |
|
input_data['categories'][0:3], input_data['categories'][0:3], |
|
115 |
|
'', '', # barcode |
|
116 |
|
'', '', # market & account id amazonsku |
|
117 |
|
'', '', # sku & parentsku amazonsku |
|
118 |
|
amazon.get_producttype_id(source=flatfile, |
|
119 |
|
sku=row['item_sku']), |
|
120 |
|
item_price, # prices |
|
121 |
|
'', '', '', #asin |
|
122 |
|
input_data['marking'] |
|
123 |
|
] |
|
124 |
|
|
|
125 |
|
except KeyError as kerr: |
|
126 |
|
error.warnPrint('itemUpload: key not found in flatfile', |
|
127 |
|
inspect.currentframe().f_back.f_lineno, |
|
128 |
|
err=kerr) |
|
129 |
|
raise KeyError |
|
130 |
|
except Exception as err: |
|
131 |
|
error.errorPrint("itemUpload: setting values failed", err, |
|
132 |
|
sys.exc_info()[2].tb_lineno) |
|
133 |
|
|
|
134 |
|
data[row['item_sku']] =\ |
|
135 |
|
collections.OrderedDict(zip(column_names, values)) |
|
136 |
|
except KeyError as err: |
|
137 |
|
error.errorPrint("Reading file failed", err, |
|
138 |
|
sys.exc_info()[2].tb_lineno) |
|
139 |
|
return row['item_sku'] |
|
|
60 |
|
raise barcode.EmptyFieldWarning('generic_keywords') |
|
61 |
|
except Exception: |
|
62 |
|
error.warnPrint( |
|
63 |
|
msg="Generic Keywords are empty!", err='', |
|
64 |
|
linenumber=inspect.currentframe().f_back.f_lineno) |
|
65 |
|
|
|
66 |
|
item_price = row['standard_price'] |
|
67 |
|
if not item_price and row['parent_child'] == 'parent': |
|
68 |
|
item_price = price.find_price(flatfile, row['item_sku']) |
|
69 |
|
if item_price == -1: |
|
70 |
|
if os.name == 'nt': |
|
71 |
|
print("press ENTER to continue..") |
|
72 |
|
input() |
|
73 |
|
sys.exit(1) |
|
74 |
|
if item_price == '': |
|
75 |
|
error.warnPrint( |
|
76 |
|
msg=str(f"{row['item_sku']}, has no price"), err='', |
|
77 |
|
linenumber=inspect.currentframe().f_back.f_lineno) |
|
78 |
|
|
|
79 |
|
|
|
80 |
|
try: |
|
81 |
|
attributes = '' |
|
82 |
|
if row['parent_child'] == 'parent': |
|
83 |
|
is_parent = True |
|
84 |
|
group_parent = row['item_sku'] |
|
85 |
|
position = 0 |
|
86 |
|
if row['parent_child'] == 'child': |
|
87 |
|
is_parent = False |
|
88 |
|
attributes = getAttributes(dataset=row, |
|
89 |
|
sets=color_size_sets) |
|
90 |
|
if(group_parent and row['parent_sku'] == group_parent): |
|
91 |
|
position += 1 |
|
92 |
|
except Exception as err: |
|
93 |
|
error.warnPrint(msg="Attribute setting failed", err=err, |
|
94 |
|
linenumber=sys.exc_info()[2].tb_lineno) |
|
95 |
|
try: |
|
96 |
|
values = [ |
|
97 |
|
row['parent_sku'], row['item_sku'], |
|
98 |
|
is_parent, |
|
99 |
|
package_properties['length'] * 10, |
|
100 |
|
package_properties['width'] * 10, |
|
101 |
|
package_properties['height'] * 10, |
|
102 |
|
package_properties['weight'], |
|
103 |
|
row['item_name'], '104', |
|
104 |
|
attributes, position, |
|
105 |
|
keywords, |
|
106 |
|
input_data['name'], row['product_description'], |
|
107 |
|
'', # externalID |
|
108 |
|
input_data['categories'], |
|
109 |
|
input_data['categories'][0:3], |
|
110 |
|
input_data['categories'][0:3], |
|
111 |
|
input_data['categories'], |
|
112 |
|
input_data['categories'][0:3], input_data['categories'][0:3], |
|
113 |
|
'', '', # barcode |
|
114 |
|
'', '', # market & account id amazonsku |
|
115 |
|
'', '', # sku & parentsku amazonsku |
|
116 |
|
amazon.get_producttype_id(source=flatfile, |
|
117 |
|
sku=row['item_sku']), |
|
118 |
|
item_price, # prices |
|
119 |
|
'', '', '', #asin |
|
120 |
|
input_data['marking'] |
|
121 |
|
] |
|
122 |
|
|
|
123 |
|
except KeyError as kerr: |
|
124 |
|
error.warnPrint( |
|
125 |
|
msg='column name not found in flatfile', |
|
126 |
|
err=kerr, |
|
127 |
|
linenumber=inspect.currentframe().f_back.f_lineno) |
|
128 |
|
raise KeyError |
|
129 |
|
except Exception as err: |
|
130 |
|
error.errorPrint(msg="setting values failed", err=err, |
|
131 |
|
linenumber=sys.exc_info()[2].tb_lineno) |
|
132 |
|
|
|
133 |
|
data[row['item_sku']] =\ |
|
134 |
|
collections.OrderedDict(zip(column_names, values)) |
140 |
135 |
|
|
141 |
136 |
# open the intern number xlsx to get the external id |
# open the intern number xlsx to get the external id |
142 |
137 |
get_externalid(dataset=data, numberlist=intern) |
get_externalid(dataset=data, numberlist=intern) |
|
... |
... |
def itemUpload(flatfile, intern, stocklist, folder, input_data, filename): |
152 |
147 |
data[row]['ASIN-type'] = barcode_data[row]['ASIN-type'] |
data[row]['ASIN-type'] = barcode_data[row]['ASIN-type'] |
153 |
148 |
data[row]['ASIN-value'] = barcode_data[row]['ASIN-value'] |
data[row]['ASIN-value'] = barcode_data[row]['ASIN-value'] |
154 |
149 |
except Exception as err: |
except Exception as err: |
155 |
|
error.errorPrint("Barcode part for "+row, err, |
|
156 |
|
sys.exc_info()[2].tb_lineno) |
|
|
150 |
|
error.errorPrint( |
|
151 |
|
msg=str(f"Barcode part for {row}"), err=err, |
|
152 |
|
linenumber=sys.exc_info()[2].tb_lineno) |
157 |
153 |
|
|
158 |
154 |
# Include the amazonsku |
# Include the amazonsku |
159 |
155 |
sku_data = amazon.amazonSkuUpload(flatfile) |
sku_data = amazon.amazonSkuUpload(flatfile) |
|
... |
... |
def itemUpload(flatfile, intern, stocklist, folder, input_data, filename): |
166 |
162 |
data[row]['amazon_sku'] = sku_data[row]['SKU'] |
data[row]['amazon_sku'] = sku_data[row]['SKU'] |
167 |
163 |
data[row]['amazon_parentsku'] = sku_data[row]['ParentSKU'] |
data[row]['amazon_parentsku'] = sku_data[row]['ParentSKU'] |
168 |
164 |
except Exception as err: |
except Exception as err: |
169 |
|
error.errorPrint("SKU part for "+row, err, |
|
170 |
|
sys.exc_info()[2].tb_lineno) |
|
|
165 |
|
error.errorPrint( |
|
166 |
|
msg=str(f"SKU part for {row}"), err=err, |
|
167 |
|
linenumber=sys.exc_info()[2].tb_lineno) |
171 |
168 |
|
|
172 |
169 |
# Sort the dictionary to make sure that the parents are the first variant of each item |
# Sort the dictionary to make sure that the parents are the first variant of each item |
173 |
170 |
sorted_data = sortProducts(data) |
sorted_data = sortProducts(data) |
174 |
171 |
|
|
175 |
172 |
barcode.writeCSV(sorted_data, "item", column_names, folder, filename) |
barcode.writeCSV(sorted_data, "item", column_names, folder, filename) |
176 |
|
except UnicodeDecodeError as err: |
|
177 |
|
error.errorPrint("decoding problem", err, |
|
178 |
|
sys.exc_info()[2].tb_lineno) |
|
179 |
|
if os.name == 'nt': |
|
180 |
|
print("press ENTER to continue..") |
|
181 |
|
input() |
|
182 |
|
exit(1) |
|
183 |
173 |
|
|
184 |
174 |
def itemPropertyUpload(flatfile, folder, filename): |
def itemPropertyUpload(flatfile, folder, filename): |
185 |
175 |
|
|
|
... |
... |
def itemPropertyUpload(flatfile, folder, filename): |
222 |
212 |
|
|
223 |
213 |
for row in reader: |
for row in reader: |
224 |
214 |
if row['parent_child'] == 'parent': |
if row['parent_child'] == 'parent': |
225 |
|
try: |
|
226 |
|
use_names =\ |
|
227 |
|
[i for i in property_names if i in list(row.keys())] |
|
228 |
|
values = [row[i] for i in use_names] |
|
229 |
|
except ValueError as err: |
|
230 |
|
error.warnPrint("No Value", |
|
231 |
|
sys.exc_info()[2].tb_lineno, err) |
|
|
215 |
|
use_names =\ |
|
216 |
|
[i for i in property_names if i in list(row.keys())] |
|
217 |
|
values = [row[i] for i in use_names] |
232 |
218 |
|
|
233 |
219 |
# Check for empty values |
# Check for empty values |
234 |
220 |
properties[row['item_sku']] = dict(zip(use_names, values)) |
properties[row['item_sku']] = dict(zip(use_names, values)) |
|
... |
... |
def itemPropertyUpload(flatfile, folder, filename): |
243 |
229 |
|
|
244 |
230 |
data[row + prop] = dict(zip(column_names, values)) |
data[row + prop] = dict(zip(column_names, values)) |
245 |
231 |
except KeyError as kerr: |
except KeyError as kerr: |
246 |
|
error.errorPrint("Key was not found in the flatfile", kerr, |
|
247 |
|
sys.exc_info()[2].tb_lineno) |
|
|
232 |
|
error.errorPrint( |
|
233 |
|
msg="Key was not found in the flatfile", err=kerr, |
|
234 |
|
linenumber=sys.exc_info()[2].tb_lineno) |
248 |
235 |
|
|
249 |
236 |
|
|
250 |
237 |
barcode.writeCSV(data, "Item_Merkmale", column_names, folder, filename) |
barcode.writeCSV(data, "Item_Merkmale", column_names, folder, filename) |
|
... |
... |
def getProperties(flatfile): |
290 |
277 |
except KeyError as err: |
except KeyError as err: |
291 |
278 |
msg = str(f"getProperties key: {err} not found") |
msg = str(f"getProperties key: {err} not found") |
292 |
279 |
error.errorPrint(msg=msg, err='', |
error.errorPrint(msg=msg, err='', |
293 |
|
linenumber=sys.exc_info()[2].tb_lineno) |
|
294 |
|
except Exception as err: |
|
295 |
|
error.errorPrint( |
|
296 |
|
msg="getProperties setting values failed", err=err, |
|
297 |
|
linenumber=sys.exc_info()[2].tb_lineno) |
|
|
280 |
|
linenumber=sys.exc_info()[2].tb_lineno) |
298 |
281 |
|
|
299 |
282 |
return properties |
return properties |
300 |
283 |
|
|
|
... |
... |
def getAttributes(dataset, sets): |
322 |
305 |
print("{0} not found in {1}" |
print("{0} not found in {1}" |
323 |
306 |
.format(dataset['parent_sku'], ','.join(list(sets.keys())))) |
.format(dataset['parent_sku'], ','.join(list(sets.keys())))) |
324 |
307 |
except Exception as err: |
except Exception as err: |
325 |
|
error.errorPrint("Adding of color attribute failed", err, |
|
326 |
|
sys.exc_info()[2].tb_lineno) |
|
|
308 |
|
error.errorPrint( |
|
309 |
|
msg="Adding color attribute failed", err=err, |
|
310 |
|
linenumber=sys.exc_info()[2].tb_lineno) |
327 |
311 |
try: |
try: |
328 |
312 |
if len(sets[dataset['parent_sku']]['size_name']) > 1: |
if len(sets[dataset['parent_sku']]['size_name']) > 1: |
329 |
313 |
if not output_string: |
if not output_string: |
|
... |
... |
def getAttributes(dataset, sets): |
331 |
315 |
else: |
else: |
332 |
316 |
output_string = output_string + ';size_name:' + dataset['size_name'] |
output_string = output_string + ';size_name:' + dataset['size_name'] |
333 |
317 |
except Exception as err: |
except Exception as err: |
334 |
|
error.errorPrint("Adding of size attribute failed", err, |
|
335 |
|
sys.exc_info()[2].tb_lineno) |
|
|
318 |
|
error.errorPrint( |
|
319 |
|
msg="Adding of size attribute failed", err=err, |
|
320 |
|
linenumber=sys.exc_info()[2].tb_lineno) |
336 |
321 |
return output_string |
return output_string |
337 |
322 |
|
|
338 |
323 |
def findSimilarAttr(flatfile, attribute): |
def findSimilarAttr(flatfile, attribute): |
|
... |
... |
def searchChild(item_list, parent): |
397 |
382 |
return child_dict |
return child_dict |
398 |
383 |
|
|
399 |
384 |
def checkFlatfile(flatfile):
    """Validate the structure of an amazon flatfile CSV.

    Parameter:
        flatfile [dict] - 'path' and 'encoding' of the amazon flatfile

    Return:
        [bool] - True if the file uses ';' as delimiter and carries the
                 expected single header line, False otherwise
    """
    with open(flatfile['path'], mode='r',
              encoding=flatfile['encoding']) as item:
        reader = csv.DictReader(item, delimiter=';')

        # Field names of the parsed file; with a wrong delimiter the
        # whole header collapses into a single column.
        first_row = [*list(reader)[0]]
        if len(first_row) == 1:
            error.errorPrint(
                msg='False delimiter detected',
                err="Wrong delimiter, use ';'",
                linenumber=inspect.currentframe().f_back.f_lineno)
            return False

        if 'feed_product_type' not in first_row:
            # 'Marke' appears in the 1st of the 3 amazon header rows, so
            # its presence means the file still has the extra rows.
            if 'Marke' in first_row:
                error.errorPrint(
                    msg="Remove the first 2 rows of the amazon flatfile",
                    err='',
                    linenumber=inspect.currentframe().f_back.f_lineno)
                return False
            error.errorPrint(
                msg="Wrong header line", err='',
                linenumber=inspect.currentframe().f_back.f_lineno)
            return False
        return True
427 |
408 |
|
|
428 |
409 |
def checkEncoding(file_dict):
    """Detect the character encoding of a file with chardet.

    Parameter:
        file_dict [dict] - contains the 'path' of the file to inspect

    Return:
        [dict] - the same dict with the detected 'encoding' added
    """
    with open(file_dict['path'], mode='rb') as item:
        try:
            raw_data = item.read()
        except Exception as err:
            print("ERROR: {0}\n".format(err))
            error.errorPrint(
                msg="check Encoding reading failed", err=err,
                linenumber=sys.exc_info()[2].tb_lineno)
        # Run the detection only once: chardet scans the whole byte
        # string, so a second call doubles the cost for no benefit.
        detection = chardet.detect(raw_data)
        file_dict['encoding'] = detection['encoding']
        print("chardet data for {0}\n{1}\n"
              .format(file_dict['path'], detection))

    return file_dict
445 |
423 |
|
|
|
... |
... |
def get_variation_id(exportfile, sku): |
461 |
439 |
exp = pandas.read_csv(exportfile, |
exp = pandas.read_csv(exportfile, |
462 |
440 |
sep=';') |
sep=';') |
463 |
441 |
|
|
464 |
|
if not len(exp.index): |
|
|
442 |
|
if len(exp.index) == 0: |
465 |
443 |
error.warnPrint( |
error.warnPrint( |
466 |
444 |
msg='exp is empty, skip variation ID', err='', |
msg='exp is empty, skip variation ID', err='', |
467 |
445 |
linenumber=inspect.currentframe().f_back.f_lineno) |
linenumber=inspect.currentframe().f_back.f_lineno) |
468 |
446 |
return 0 |
return 0 |
469 |
447 |
|
|
470 |
|
if(not len(exp.columns[exp.columns.str.contains(pat='Variation.id')]) or |
|
471 |
|
not len(exp.columns[exp.columns.str.contains(pat='Variation.number')])): |
|
|
448 |
|
if(len(exp.columns[exp.columns.str.contains(pat='Variation.id')]) == 0 or |
|
449 |
|
len(exp.columns[exp.columns.str.contains(pat='Variation.number')]) == 0): |
472 |
450 |
error.warnPrint( |
error.warnPrint( |
473 |
451 |
msg="Exportfile requires fields 'Variation.id'&'Variation.number'", |
msg="Exportfile requires fields 'Variation.id'&'Variation.number'", |
474 |
452 |
err='', linenumber=inspect.currentframe().f_back.f_lineno) |
err='', linenumber=inspect.currentframe().f_back.f_lineno) |
475 |
453 |
return 0 |
return 0 |
476 |
454 |
|
|
477 |
455 |
variation = exp[exp['Variation.number'] == sku] |
variation = exp[exp['Variation.number'] == sku] |
478 |
|
if not len(variation.index): |
|
|
456 |
|
if len(variation.index) == 0: |
479 |
457 |
error.warnPrint( |
error.warnPrint( |
480 |
458 |
msg=str(f"{sku} not found in Plentymarkets export"), |
msg=str(f"{sku} not found in Plentymarkets export"), |
481 |
459 |
err='', linenumber=inspect.currentframe().f_back.f_lineno) |
err='', linenumber=inspect.currentframe().f_back.f_lineno) |
|
... |
... |
def get_externalid(dataset, numberlist): |
505 |
483 |
extern_id = pandas.read_excel(numberlist['path']) |
extern_id = pandas.read_excel(numberlist['path']) |
506 |
484 |
except xlrd.biffh.XLRDError as err: |
except xlrd.biffh.XLRDError as err: |
507 |
485 |
error.errorPrint( |
error.errorPrint( |
508 |
|
msg=str(f"..{intern['path'][-30:]} requires type [.xlsx]"), |
|
|
486 |
|
msg=str(f"..{numberlist['path'][-30:]} requires type [.xlsx]"), |
509 |
487 |
err=err, linenumber=sys.exc_info()[2].tb_lineno) |
err=err, linenumber=sys.exc_info()[2].tb_lineno) |
510 |
488 |
if os.name == 'nt': |
if os.name == 'nt': |
511 |
489 |
print("press ENTER to continue..") |
print("press ENTER to continue..") |
512 |
490 |
input() |
input() |
513 |
|
exit(1) |
|
|
491 |
|
sys.exit(1) |
514 |
492 |
|
|
515 |
493 |
if extern_id.empty: |
if extern_id.empty: |
516 |
494 |
error.warnPrint( |
error.warnPrint( |