File packages/UploadGUI.py added (mode: 100644) (index 0000000..91249c7) |
|
1 |
|
import tkinter |
|
2 |
|
from os.path import isfile |
|
3 |
|
|
|
4 |
|
#class checkBoxes(tkinter.Frame): |
|
5 |
|
#def __init__(self, master, |
|
6 |
|
|
|
7 |
|
|
|
8 |
|
|
|
9 |
|
class UploadGUI(tkinter.Tk):
    """Root window that lets the user pick which upload files to create."""

    def __init__(self, master):
        """Initialize the Tk root window and build the widget layout."""
        tkinter.Tk.__init__(self, master)
        self.master = master
        self.initialize()

    def initialize(self):
        """Create the header label and one description label per upload type."""
        self.grid()

        # Window colour scheme.
        self.bg = "chocolate1"
        self.txtcolor = "black"
        self.bordercolor = "red4"

        # Checkbox graphics for the (unfinished) canvas-based checkboxes.
        # NOTE(review): absolute paths — these resources only exist on the
        # original developer's machine; consider a path relative to the
        # package directory.
        unchecked_path = '/home/basti/Documents/PyProjects/CSV-Project/PlentyMarkets/packages/gfx/checkbox-unchecked.png'
        checked_path = '/home/basti/Documents/PyProjects/CSV-Project/PlentyMarkets/packages/gfx/checkbox-checked.png'
        if isfile(unchecked_path):
            self.uncheckedimg = tkinter.PhotoImage(file=unchecked_path)
            print("OK img1")
        if isfile(checked_path):
            self.checkedimg = tkinter.PhotoImage(file=checked_path)
            print("OK img2")

        self.header = tkinter.Label(
            self,
            text="Choose which upload files you want to create and upload the required files.\n",
            anchor="w", fg=self.txtcolor, bg=self.bg)
        self.header.grid(column=0, row=0, columnspan=5, rowspan=2, sticky="EW")

        # One description label per upload type, all placed on row 3.
        label_specs = (
            ('itemup_desc', "Item Upload(Parent)\t", 0),
            ('variationup_desc', "Variation Upload(children)\t", 1),
            ('activeup_desc', "variation status to active\t", 2),
            ('barcodeup_desc', "barcode Upload\t", 3),
        )
        for attr_name, label_text, col in label_specs:
            label = tkinter.Label(self, text=label_text, anchor="center",
                                  fg=self.txtcolor, bg=self.bg)
            label.grid(column=col, row=3, sticky="EW")
            setattr(self, attr_name, label)

        # NOTE: an experiment rendering checkboxes on tkinter.Canvas widgets
        # (with a switchImage click handler toggling the checked/unchecked
        # images bound via <Button-1>) was started here but never finished;
        # it was removed from the active code path.
File packages/attribute_upload.py added (mode: 100644) (index 0000000..84e0e83) |
|
1 |
|
from csv import DictWriter, DictReader |
|
2 |
|
from os.path import isfile |
|
3 |
|
try: |
|
4 |
|
from sortedcontainers import SortedDict |
|
5 |
|
except ImportError: |
|
6 |
|
print("the sortedcontainers module is required to run this program.") |
|
7 |
|
raise ImportError |
|
8 |
|
|
|
9 |
|
|
|
10 |
|
def attributeUpload(filepath):
    """Build a PlentyMarkets attribute-upload CSV from an Amazon flatfile.

    Collects every distinct colour, size and material value found in the
    flatfile and writes one row per attribute value to a new, uniquely
    numbered file under Upload/.

    :param filepath: path to the semicolon-delimited Amazon flatfile CSV.
    """
    # The list of column names for the upload file.
    column_names_output = ['AttributeBackendName', 'AttributeContentPage',
                           'AttributeFrontendName', 'AttributeID',
                           'AttributeImage', 'AttributePosition',
                           'AttributeValueBackendName', 'AttributeValueComment',
                           'AttributeValueFrontendName', 'AttributeValueID',
                           'AttributeValuePosition', 'Lang']

    # Rows keyed 'color1', 'size2', ... — SortedDict keeps the output order
    # grouped by attribute.
    Data = SortedDict()
    colorset = set()
    sizeset = set()
    materialset = set()

    # Scan the flatfile once and record every non-empty attribute value.
    with open(filepath, mode='r') as item:
        reader = DictReader(item, delimiter=';')
        for row in reader:
            if row['color_name']:
                colorset.add(row['color_name'])
            if row['size_name']:
                sizeset.add(row['size_name'])
            if row['outer_material_type']:
                materialset.add(row['outer_material_type'])

    # PERF FIX: the original rebuilt list(<set>) twice on every loop
    # iteration (O(n^2)); convert each set once and enumerate it instead.
    # The per-attribute constants (backend name, frontend name, attribute
    # ID and position) come from the original hard-coded rows.
    attribute_groups = (
        ('color', 'color_name', 'Farbname', '4', '1', colorset),
        ('size', 'size_name', 'Größe', '5', '2', sizeset),
        ('material', 'material_name', 'Material', '6', '3', materialset),
    )
    for key, backend, frontend, attr_id, attr_pos, valueset in attribute_groups:
        for i, value in enumerate(list(valueset)):
            values = [backend, '0', frontend, attr_id, '0', attr_pos,
                      value, '', value, '', i + 1, 'de']
            Data[key + str(i + 1)] = SortedDict(zip(column_names_output, values))

    # Write Data into a new CSV for upload.
    # OUTPUT
    # --------------------------------------------------------------
    # Pick the first unused numbered filename.
    output_path_number = 1
    datatype = ".csv"
    output_path = "Upload/attribute_upload_" + str(output_path_number) + datatype

    while isfile(output_path):
        output_path_number += 1
        output_path = "Upload/attribute_upload_" + str(output_path_number) + datatype

    # newline='' as required by the csv module docs; mode 'w' is safe
    # because the path was just verified to be unused.
    with open(output_path, mode='w', newline='') as item:
        writer = DictWriter(item, delimiter=";", fieldnames=column_names_output)
        writer.writeheader()
        for key in Data:
            writer.writerow(Data[key])

    if isfile(output_path):
        print("Upload file successfully created under {0}".format(output_path))
File packages/item_upload.py added (mode: 100644) (index 0000000..f0872a2) |
|
1 |
|
from csv import DictReader, DictWriter |
|
2 |
|
from os.path import isfile |
|
3 |
|
from sys import exit |
|
4 |
|
try: |
|
5 |
|
from sortedcontainers import SortedDict |
|
6 |
|
except ImportError: |
|
7 |
|
print("the sortedcontainers module is required to run this program.") |
|
8 |
|
raise ImportError |
|
9 |
|
#from ..packages.category import determineCategory |
|
10 |
|
|
|
11 |
|
|
|
12 |
|
def itemUpload(filepath, intern_number):
    """Create the PlentyMarkets item (parent) upload CSV from a flatfile.

    Reads parent rows from the Amazon flatfile, enriches them with the
    intern item ID and writes a new, uniquely numbered CSV under Upload/.

    :param filepath: path to the semicolon-delimited Amazon flatfile.
    :param intern_number: path to the CSV mapping amazon_sku -> article_id.
    :return: path of the upload file that was written.
    """
    # The column headers for the output file as expected from the
    # plentymarkets dataformat.
    column_names_output = ['CategoryLevel1Name', 'CategoryLevel2Name', 'CategoryLevel3Name', 'CategoryLevel4Name', 'CategoryLevel5Name', 'CategoryLevel6Name', 'ItemID', 'PrimaryVariationCustomNumber', 'PrimaryVariationLengthMM', 'PrimaryVariationWidthMM', 'PrimaryVariationHeightMM', 'PrimaryVariationWeightG', 'PrimaryVariationName', 'PrimaryVariationPurchasePrice', 'ItemImageURL', 'PrimaryVariationMainWarehouse', 'ItemOriginCountry', 'ItemProducer', 'ItemProducerID', 'ItemTextName', 'ItemTextDescription']

    # default values: CategoryLevel5Name : '', CategoryLevel6Name : '',
    # ItemOriginCountry : '62', ItemProducer : 'PANASIAM', ItemProducerID : '3'

    # INPUT: unpack the file and scrap the parent rows.
    # --------------------------------------------------------------
    Data = SortedDict()

    with open(filepath, mode='r') as item:
        reader = DictReader(item, delimiter=";")
        for row in reader:
            # Only parent rows carry the name and description in the flatfile.
            if row['parent_child'] == 'parent':
                try:
                    if row['package_height'] and row['package_length'] and row['package_width']:
                        row['package_height'] = int(row['package_height'])
                        row['package_length'] = int(row['package_length'])
                        row['package_width'] = int(row['package_width'])
                except ValueError:
                    # Values like '12.0' fail int(); retry via float.
                    # BUGFIX: the original attached a second `except
                    # ValueError` to the same try, which can never run; a
                    # nested try is needed so a failing float fallback is
                    # reported instead of propagating uncaught.
                    try:
                        row['package_height'] = int(float(row['package_height']))
                        row['package_length'] = int(float(row['package_length']))
                        row['package_width'] = int(float(row['package_width']))
                    except ValueError as err:
                        print(err)
                        # BUGFIX: '/n' typo corrected to '\n'.
                        print("\nPlease copy the values for height, length, width and weight\nfrom the children to the parent variation in the flatfile.\n")
                        exit()
                try:
                    # Package dimensions are multiplied by 10 (cm -> mm) for
                    # the *MM output columns.
                    values = ['', '', '', '', '', '', '', row['item_sku'], row['package_length'] * 10, row['package_width'] * 10, row['package_height'] * 10, row['package_weight'], row['item_name'], row['standard_price'], row['main_image_url'], 'Badel', '62', 'PANASIAM', '3', '', row['product_description']]
                except Exception as err:
                    # BUGFIX: skip the defective row instead of silently
                    # reusing the previous row's `values` (or raising
                    # NameError on the very first row).
                    print(err)
                    continue
                Data[row['item_sku']] = SortedDict(zip(column_names_output, values))

    # Open the intern number csv to get the item ID.
    with open(intern_number, mode='r') as item:
        reader = DictReader(item, delimiter=";")
        for row in reader:
            if row['amazon_sku'] in Data:
                Data[row['amazon_sku']]['ItemID'] = row['article_id']

    # OUTPUT: write Data into a new CSV for upload.
    # --------------------------------------------------------------
    output_path_number = 1
    datatype = ".csv"
    output_path = "Upload/item_upload_" + str(output_path_number) + datatype

    # Pick the first unused numbered filename.
    while isfile(output_path):
        output_path_number += 1
        output_path = "Upload/item_upload_" + str(output_path_number) + datatype

    # newline='' per the csv docs; 'w' is safe because the path is unused.
    with open(output_path, mode='w', newline='') as item:
        writer = DictWriter(item, delimiter=";", fieldnames=column_names_output)
        writer.writeheader()
        for key in Data:
            writer.writerow(Data[key])

    if isfile(output_path):
        print("Upload file successfully created under {0}".format(output_path))

    return output_path
File packages/stock_upload.py added (mode: 100644) (index 0000000..6f2268f) |
|
1 |
|
from csv import DictReader, DictWriter |
|
2 |
|
from os.path import isfile |
|
3 |
|
try: |
|
4 |
|
from sortedcontainers import SortedDict |
|
5 |
|
except ImportError: |
|
6 |
|
print("the sortedcontainers module is required to run this program.") |
|
7 |
|
raise ImportError |
|
8 |
|
|
|
9 |
|
|
|
10 |
|
def writeCSV(dataobject, name, columns):
    """Write *dataobject* to a new, uniquely numbered upload CSV.

    :param dataobject: mapping of row-key -> row mapping (keys in *columns*).
    :param name: basename fragment, e.g. 'stock' -> Upload/stock_upload_N.csv.
    :param columns: fieldnames for the semicolon-delimited output.
    :return: path of the file that was written.
    """
    from os import makedirs

    # BUGFIX: the original raised FileNotFoundError when the Upload/
    # directory did not exist yet.
    makedirs("Upload", exist_ok=True)

    # Pick the first unused numbered filename.
    output_path_number = 1
    datatype = ".csv"
    output_path = "Upload/" + name + "_upload_" + str(output_path_number) + datatype

    while isfile(output_path):
        output_path_number += 1
        output_path = "Upload/" + name + "_upload_" + str(output_path_number) + datatype

    # newline='' as required by the csv module docs; mode 'w' is safe
    # because the path was just verified to be unused.
    with open(output_path, mode='w', newline='') as item:
        writer = DictWriter(item, delimiter=";", fieldnames=columns)
        writer.writeheader()
        for key in dataobject:
            writer.writerow(dataobject[key])

    if isfile(output_path):
        print("Upload file successfully created under {0}".format(output_path))

    return output_path
|
33 |
|
|
|
34 |
|
|
|
35 |
|
def stockUpload(flatfile, export, stocklist):
    """Build the stock upload CSV from flatfile, Plenty export and stocklist.

    :param flatfile: semicolon-delimited Amazon flatfile (barcode + sku).
    :param export: PlentyMarkets variation export (sku -> VariationID).
    :param stocklist: Amazon storage report with the current stock numbers.
    :return: path of the upload file written by writeCSV.
    """
    # The column header names.
    column_names = ['Barcode', 'LocationID', 'LocationName', 'Reordered', 'ReservedStock', 'Stock', 'WarehouseID', 'VariationID', 'VariationNo']

    # Fill a Data dictionary with the necessary values from the flatfile.
    Data = SortedDict()

    with open(flatfile, mode='r') as item:
        reader = DictReader(item, delimiter=";")
        for row in reader:
            # Only variations that carry an external product id (barcode).
            if row['external_product_id']:
                values = [row['external_product_id'], 0, 'Standard-Lagerort', '', '', '', '104', '', row['item_sku']]
                Data[row['item_sku']] = SortedDict(zip(column_names, values))

    # Plenty export: look up the VariationID for each sku.
    with open(export, mode='r') as item:
        reader = DictReader(item, delimiter=";")
        for row in reader:
            if row['VariationNumber'] in Data:
                Data[row['VariationNumber']]['VariationID'] = row['VariationID']

    # Storage report: take over the stock numbers.
    # NOTE(review): the stock column name 'BADEL 26.12.16' is specific to
    # one report snapshot and will break on other exports — confirm.
    with open(stocklist, mode='r') as item:
        reader = DictReader(item, delimiter=";")
        for row in reader:
            if row['MASTER'] and row['MASTER'] in Data:
                Data[row['MASTER']]['Stock'] = row['BADEL 26.12.16']

    # CONSISTENCY FIX: return the output path like variationUpload does
    # (the original assigned it to an unused local).
    return writeCSV(Data, 'stock', column_names)
|
63 |
|
|
|
64 |
|
|
|
65 |
|
def priceUpload(flatfile, export):
    """Build the sales-price upload CSV from the flatfile and Plenty export.

    :param flatfile: semicolon-delimited Amazon flatfile (standard_price).
    :param export: PlentyMarkets variation export (sku -> VariationID).
    :return: path of the upload file written by writeCSV.
    """
    # The column header names.
    column_names = ['VariationID', 'IsNet', 'VariationPrice', 'SalesPriceID']

    # Fill a Data dictionary with the necessary values from the flatfile.
    Data = SortedDict()

    with open(flatfile, mode='r') as item:
        reader = DictReader(item, delimiter=";")
        for row in reader:
            # Only variations that carry an external product id (barcode).
            if row['external_product_id']:
                values = ['', 0, row['standard_price'], 1]
                Data[row['item_sku']] = SortedDict(zip(column_names, values))

    # Plenty export: look up the VariationID for each sku.
    with open(export, mode='r') as item:
        reader = DictReader(item, delimiter=";")
        for row in reader:
            if row['VariationNumber'] in Data:
                Data[row['VariationNumber']]['VariationID'] = row['VariationID']

    # CONSISTENCY FIX: return the output path like variationUpload does
    # (the original assigned it to an unused local).
    return writeCSV(Data, 'price', column_names)
File packages/variation_upload.py added (mode: 100644) (index 0000000..cd4c2fe) |
|
1 |
|
from csv import DictReader, DictWriter |
|
2 |
|
from os.path import isfile |
|
3 |
|
try: |
|
4 |
|
from sortedcontainers import SortedDict |
|
5 |
|
except ImportError: |
|
6 |
|
print("the sortedcontainers module is required to run this program.") |
|
7 |
|
raise ImportError |
|
8 |
|
|
|
9 |
|
|
|
10 |
|
def writeCSV(dataobject, name, columns):
    """Write *dataobject* to a new, uniquely numbered upload CSV.

    :param dataobject: mapping of row-key -> row mapping (keys in *columns*).
    :param name: basename fragment, e.g. 'variation' -> Upload/variation_upload_N.csv.
    :param columns: fieldnames for the semicolon-delimited output.
    :return: path of the file that was written.
    """
    from os import makedirs

    # BUGFIX: the original raised FileNotFoundError when the Upload/
    # directory did not exist yet.
    makedirs("Upload", exist_ok=True)

    # Pick the first unused numbered filename.
    output_path_number = 1
    datatype = ".csv"
    output_path = "Upload/" + name + "_upload_" + str(output_path_number) + datatype

    while isfile(output_path):
        output_path_number += 1
        output_path = "Upload/" + name + "_upload_" + str(output_path_number) + datatype

    # newline='' as required by the csv module docs; mode 'w' is safe
    # because the path was just verified to be unused.
    with open(output_path, mode='w', newline='') as item:
        writer = DictWriter(item, delimiter=";", fieldnames=columns)
        writer.writeheader()
        for key in dataobject:
            writer.writerow(dataobject[key])

    if isfile(output_path):
        print("Upload file successfully created under {0}".format(output_path))

    return output_path
|
33 |
|
|
|
34 |
|
|
|
35 |
|
def variationUpload(flatfile, intern_number):
    """Create the variation (child) upload CSV from an Amazon flatfile.

    :param flatfile: semicolon-delimited Amazon flatfile.
    :param intern_number: CSV mapping amazon_sku -> article_id / full_number
        / position.
    :return: path of the upload file written by writeCSV.
    """
    # The column header names.
    names = ['ItemID', 'VariationID', 'VariationNumber', 'VariationName', 'Position', 'LengthMM', 'WidthMM', 'HeightMM', 'WeightG', 'VariationAttributes', 'PurchasePrice', 'MainWarehouse', 'Availability', 'AutoStockVisible']

    # Fill a Data dictionary with the necessary values from the flatfile.
    Data = SortedDict()

    # NOTE(review): assumes every parent row precedes its child rows so that
    # item_name is set before the first child is processed — confirm for all
    # flatfiles.
    with open(flatfile, mode='r') as item:
        reader = DictReader(item, delimiter=";")
        for row in reader:
            if row['parent_child'] == 'parent':
                item_name = row['item_name']
            if row['parent_child'] == 'child':
                try:
                    if row['package_height'] and row['package_length'] and row['package_width']:
                        row['package_height'] = int(row['package_height'])
                        row['package_length'] = int(row['package_length'])
                        row['package_width'] = int(row['package_width'])
                except ValueError:
                    # Values like '12.0' fail int(); retry via float.
                    # BUGFIX: the original attached a second `except
                    # ValueError` to the same try, which can never run; a
                    # nested try is required.
                    try:
                        row['package_height'] = int(float(row['package_height']))
                        row['package_length'] = int(float(row['package_length']))
                        row['package_width'] = int(float(row['package_width']))
                    except ValueError as err:
                        print(err)
                        # BUGFIX: '/n' typo corrected to '\n'.
                        print("\nPlease copy the values for height, length, width and weight\nfrom the children to the parent variation in the flatfile.\n")
                        # NOTE(review): 'exit' is the site builtin here;
                        # importing sys.exit would be more robust.
                        exit()

                # BUGFIX: start from '' so a child without a colour cannot
                # raise UnboundLocalError or inherit the previous row's
                # attribute string.
                attributes = ''
                if row['color_name']:
                    attributes = 'color_name:' + row['color_name']
                if row['size_name']:
                    attributes += ';size_name:' + row['size_name']
                if row['outer_material_type']:
                    attributes += ';material_name:' + row['outer_material_type']
                if 'pattern' in row and row['pattern']:
                    attributes += ';pattern:' + row['pattern']
                try:
                    # Package dimensions are multiplied by 10 (cm -> mm).
                    values = ['', '', row['item_sku'], item_name, '', int(row['package_length']) * 10, int(row['package_width']) * 10, int(row['package_height']) * 10, row['package_weight'], attributes, row['standard_price'], 'Badel', 'Y', 'Y']
                except Exception as err:
                    print(err)
                    exit()
                Data[row['item_sku']] = SortedDict(zip(names, values))

    # Open the intern numbers csv and fill in the remaining missing fields,
    # using the item_sku as dict key.
    with open(intern_number, mode='r') as item:
        reader = DictReader(item, delimiter=';')
        for row in reader:
            # Check if the sku is within the keys of the Data dictionary.
            if row['amazon_sku'] in Data:
                Data[row['amazon_sku']]['ItemID'] = row['article_id']
                Data[row['amazon_sku']]['VariationID'] = row['full_number']
                # BUGFIX: csv fields are strings, so the original
                # `row['position'] == 0` was never true and position 0 was
                # written anyway; compare against the string '0'.
                if row['position'] != '0':
                    Data[row['amazon_sku']]['Position'] = row['position']

    output_path = writeCSV(Data, 'variation', names)

    return output_path
|
92 |
|
|
|
93 |
|
|
|
94 |
|
def setActive(flatfile, export):
    """Create the 'set variation status to active' upload CSV.

    Because of a regulation of the plentyMarkets system the active status
    has to be delivered as an extra upload.

    :param flatfile: semicolon-delimited Amazon flatfile.
    :param export: PlentyMarkets variation export (sku -> IDs).
    :return: path of the upload file written by writeCSV.
    """
    column_names = ['Active', 'ItemID', 'VariationID', 'VariationNumber']
    Data = {}

    # Flatfile: one row per sku, all marked active.
    with open(flatfile, mode='r') as item:
        reader = DictReader(item, delimiter=';')

        for row in reader:
            values = ['Y', '', '', row['item_sku']]
            Data[row['item_sku']] = dict(zip(column_names, values))

    # Export: fill in the Plenty item and variation IDs.
    with open(export, mode='r') as item:
        reader = DictReader(item, delimiter=';')
        for row in reader:
            if row['VariationNumber'] in Data:
                Data[row['VariationNumber']]['ItemID'] = row['ItemID']
                Data[row['VariationNumber']]['VariationID'] = row['VariationID']

    # CONSISTENCY FIX: return the output path like variationUpload does
    # (the original assigned it to an unused local).
    return writeCSV(Data, 'active', column_names)
|
113 |
|
|
|
114 |
|
|
|
115 |
|
def EANUpload(flatfile, export):
    """Create the barcode upload CSV (sku -> external product id).

    Reads the barcode for every sku from the flatfile and the VariationID
    from the Plenty export, in the plentymarkets dataformat columns.

    :param flatfile: semicolon-delimited Amazon flatfile.
    :param export: PlentyMarkets variation export (sku -> VariationID).
    :return: path of the upload file written by writeCSV.
    """
    column_names = ['BarcodeID', 'BarcodeName', 'BarcodeType', 'Code', 'VariationID', 'VariationNumber']
    Data = {}
    with open(flatfile, mode='r') as item:
        reader = DictReader(item, delimiter=";")

        for row in reader:
            # NOTE(review): BarcodeID '3' and both BarcodeName/BarcodeType
            # hard-coded to 'UPC' — confirm against the Plenty barcode
            # configuration (EANs would need a different type).
            values = ['3', 'UPC', 'UPC', row['external_product_id'], '', row['item_sku']]
            Data[row['item_sku']] = dict(zip(column_names, values))

    # Open the exported file to get the variation id.
    with open(export, mode='r') as item:
        reader = DictReader(item, delimiter=";")

        for row in reader:
            if row['VariationNumber'] in Data:
                Data[row['VariationNumber']]['VariationID'] = row['VariationID']

    # CONSISTENCY FIX: return the output path like variationUpload does
    # (the original assigned it to an unused local).
    return writeCSV(Data, 'EAN', column_names)
File product_import.py added (mode: 100644) (index 0000000..030778f) |
|
1 |
|
from tkinter import Tk |
|
2 |
|
from tkinter.filedialog import askopenfilename |
|
3 |
|
from sys import exit |
|
4 |
|
from packages.item_upload import itemUpload |
|
5 |
|
from packages.attribute_upload import attributeUpload |
|
6 |
|
from packages.variation_upload import variationUpload, setActive, EANUpload |
|
7 |
|
from packages.stock_upload import stockUpload, priceUpload |
|
8 |
|
from packages.UploadGUI import UploadGUI |
|
9 |
|
|
|
10 |
|
|
|
11 |
|
def main():
    '''
    Command Line Test Version will be used within the GUI as soon as it is finished
    '''
    # The UploadGUI window ("Amazon Flatfile to PlentyMarkets Upload") is
    # not wired up yet; until then the workflow runs on the command line
    # with tkinter file-picker dialogs only.
    root = Tk()
    root.withdraw()
    sheet = askopenfilename()
    intern_number = askopenfilename()
    print("spreadsheet csv containing the flatfile : ", sheet)
    print("spreadsheet csv containing the intern numbers : ", intern_number)
    try:
        itempath = itemUpload(sheet, intern_number)
    except Exception as exc:
        print(exc)
        print("Item Upload failed!")

    try:
        variationpath = variationUpload(sheet, intern_number)
    except Exception as exc:
        print(exc)
        print("VariationUpload failed!")

    print("\nGet a dataexport from the plentymarket site from the variation attributes, in order to access the current Variation ID.\n")
    try:
        export = askopenfilename()
    except FileNotFoundError:
        print("No Export File!")
        # BUGFIX: without an export file every later step would raise
        # NameError on 'export'; stop cleanly instead of continuing.
        exit()
    except Exception as exc:
        print(exc)
        print("Something went wrong at the Export file import!")
        # BUGFIX: same as above — 'export' is unbound past this point.
        exit()
    print("spreadsheet csv containing the export : ", export)
    try:
        EANUpload(sheet, export)
        setActive(sheet, export)
        priceUpload(sheet, export)
    except FileNotFoundError as err:
        print(err)
        print("Missing Data, check if you have\n - a flatfile\n - a intern file table\n - export file from plentymarkets\n - a sheet with the stock numbers!\n")
        exit()
    print("\nOpen your amazon storage report and save it as an csv.\n")
    stocklist = askopenfilename()
    print("spreadsheet csv containing the current stock : ", stocklist)

    stockUpload(sheet, export, stocklist)

    # In case of new attributes uncomment and watch attribute_upload.py first
    # attributeUpload(sheet)
|
67 |
|
# Script entry point: run the interactive upload-file workflow.
if __name__ == '__main__':
    main()
|
69 |
|
|
|
70 |
|
|