initBasti / Amazon2PlentySync (public) (License: GPLv3) (since 2019-01-27) (hash sha1)
Transfer your data from your Amazon Flatfile spreadsheet over to the Plentymarkets system. A how-to is included in the readme.
List of commits:
Subject Hash Author Date (UTC)
General improvements and property upload bb48084db4359210eb892a04f1322f6fda822bef Sebastian Fricke 2019-01-30 17:43:32
Fixed scripts according to dataformat changes + readme dec28d9e6ff5c5c903d5ca01a969e661d43b66c6 Sebastian Fricke 2019-01-29 21:08:04
Working Checkboxes and file import 25378c68a6220c1c6570642920e6150a50415153 Sebastian Fricke 2019-01-29 21:03:23
Added checkboxes, descriptions, import and runbutton 2021f0960e70c8c229ec08488165dc01b998a6e0 Sebastian Fricke 2019-01-27 22:19:18
Added market connection, cosmetics in product import c9a771d5e7a3a80adc650e773c568e00dd8e2aea Sebastian Fricke 2019-01-23 15:01:47
Amazon Data Upload 33dbd0ed6945c01d8917ceae3cf3964f051a2288 Sebastian Fricke 2019-01-22 14:43:39
Readme started, amazon sku upload, vari upload, images f43a9e83598c3e4623bcb08667e2b4e649b2cdea Sebastian Fricke 2019-01-22 10:44:40
Amazon SKU Upload 8586da2ae91d49c81a0d9b6ff220c8a1b1b011a6 Sebastian Fricke 2019-01-16 18:36:54
Inital Commit with current working version of the CLI Tool and the work in progress of the GUI. 207fef4277f7c169aa79eb39ec1aaaab258b888c Sebastian Fricke 2019-01-16 09:47:43
Initial commit ba965ee75fe09437fb08da5edd25b20e39e17eff Sebastian Fricke 2019-01-16 09:42:30
Commit bb48084db4359210eb892a04f1322f6fda822bef - General improvements and property upload
deleted redundant code, improved item upload and added an item property
upload.
Author: Sebastian Fricke
Author date (UTC): 2019-01-30 17:43
Committer name: Sebastian Fricke
Committer date (UTC): 2019-01-30 17:43
Parent(s): dec28d9e6ff5c5c903d5ca01a969e661d43b66c6
Signing key:
Tree: bdd15629b886ba9f7b19b83730b62736feebe260
File Lines added Lines deleted
packages/amazon_data_upload.py 1 27
packages/item_upload.py 48 20
packages/stock_upload.py 1 27
product_import.py 3 2
File packages/amazon_data_upload.py changed (mode: 100644) (index 7084878..5fc97a7)
1 1 import csv import csv
2 2 from os.path import isfile from os.path import isfile
3 3 import sys import sys
4 from variation_upload import writeCSV
4 5 try: try:
5 6 from sortedcontainers import SortedDict from sortedcontainers import SortedDict
6 7 except ImportError: except ImportError:
 
... ... except ImportError:
8 9 raise ImportError raise ImportError
9 10
10 11
11 def writeCSV(dataobject, name, columns):
12 '''Write Data into new CSV for Upload
13 OUTPUT
14 '''
15
16 output_path_number = 1
17 datatype = ".csv"
18 output_path = "Upload/" + name + "_upload_" + \
19 str(output_path_number) + datatype
20
21 while(isfile(output_path)):
22 output_path_number = int(output_path_number) + 1
23 output_path = "Upload/" + name + "_upload_" + \
24 str(output_path_number) + datatype
25
26 with open(output_path, mode='a') as item:
27 writer = csv.DictWriter(item, delimiter=";", fieldnames=columns)
28 writer.writeheader()
29 for row in dataobject:
30 writer.writerow(dataobject[row])
31
32 if(isfile(output_path)):
33 print("Upload file successfully created under {0}".format(output_path))
34
35 return output_path
36
37
38 12 def amazonSkuUpload(flatfile, export): def amazonSkuUpload(flatfile, export):
39 13
40 14 column_names = ['VariationID', 'MarketID', column_names = ['VariationID', 'MarketID',
File packages/item_upload.py changed (mode: 100644) (index 9e69f47..e63a688)
1 from csv import DictReader, DictWriter
1 import csv
2 2 from os.path import isfile from os.path import isfile
3 3 from sys import exit from sys import exit
4 from variation_upload import writeCSV
5
6
4 7 try: try:
5 8 from sortedcontainers import SortedDict from sortedcontainers import SortedDict
6 9 except ImportError: except ImportError:
 
... ... def itemUpload(filepath, intern_number):
33 36 Data = SortedDict() Data = SortedDict()
34 37
35 38 with open(filepath, mode='r') as item: with open(filepath, mode='r') as item:
36 reader = DictReader(item, delimiter=";")
39 reader = csv.DictReader(item, delimiter=";")
37 40 for row in reader: for row in reader:
38 # if the item is a parent scrap the name and the desc from the flatfile
41 # if the item is a parent scrap the name and the desc from the
42 # flatfile
39 43 if(row['parent_child'] == 'parent'): if(row['parent_child'] == 'parent'):
40 44 try: try:
41 45 if(row['package_height'] and row['package_length'] and row['package_width']): if(row['package_height'] and row['package_length'] and row['package_width']):
 
... ... def itemUpload(filepath, intern_number):
68 72
69 73 # open the intern number csv to get the item ID # open the intern number csv to get the item ID
70 74 with open(intern_number, mode='r') as item: with open(intern_number, mode='r') as item:
71 reader = DictReader(item, delimiter=";")
75 reader = csv.DictReader(item, delimiter=";")
72 76 for row in reader: for row in reader:
73 77 if(row['amazon_sku'] in [*Data]): if(row['amazon_sku'] in [*Data]):
74 78 Data[row['amazon_sku']]['ItemID'] = row['article_id'] Data[row['amazon_sku']]['ItemID'] = row['article_id']
 
... ... def itemUpload(filepath, intern_number):
77 81 # OUTPUT # OUTPUT
78 82 # -------------------------------------------------------------- # --------------------------------------------------------------
79 83
80 output_path_number = 1
81 datatype = ".csv"
82 output_path = "Upload/item_upload_" + str(output_path_number) + datatype
84 writeCSV(Data, "item", column_names_output)
85
86
87 def itemPropertyUpload(flatfile, export):
88
89 with open(flatfile, mode='r') as item:
90 reader = csv.DictReader(item, delimiter=';', lineterminator='\n')
83 91
84 while(isfile(output_path)):
85 output_path_number = int(output_path_number) + 1
86 output_path = "Upload/item_upload_" + \
87 str(output_path_number) + datatype
92 material = {}
93 # search for a material name and assign a number that correlates to it
94 for row in reader:
95 if(row['parent_child'] == 'parent'):
96 if(re.search(r'(cotton|baumwolle)',
97 row['outer_material_type'].lower())):
98 material[row['item_sku']] = 4
99 if(re.search(r'(hemp|hanf)',
100 row['outer_material_type'].lower())):
101 material[row['item_sku']] = 5
102 if(re.search(r'(viskose|viscose)',
103 row['outer_material_type'].lower())):
104 material[row['item_sku']] = 6
105
106 with open(export, mode='r') as item:
107 reader = csv.DictReader(item, delimiter=';', lineterminator='\n')
88 108
89 with open(output_path, mode='a') as item:
90 writer = DictWriter(item, delimiter=";",
91 fieldnames=column_names_output)
92 writer.writeheader()
93 for row in Data:
94 writer.writerow(Data[row])
109 column_names = ['PropertyItemID', 'ItemID',
110 'PrimaryVariationCustomNumber']
111
112 Data = {}
113 for row in reader:
114 if(row['AttributeValueSetID'] == ''):
115 values = ['3',
116 row['ItemID'],
117 row['VariationName']]
95 118
96 if(isfile(output_path)):
97 print("Upload file successfully created under {0}".format(output_path))
119 Data[row['VariationNumber'] + '1'] = dict(zip(column_names,
120 values))
121 values = [material[row['VariationNumber']],
122 row['ItemID'],
123 row['VariationName']]
98 124
99 return output_path
125 Data[row['VariationNumber'] + '2'] = dict(zip(column_names,
126 values))
127 writeCSV(Data, "property", column_names)
File packages/stock_upload.py changed (mode: 100644) (index 568306c..9a28b2b)
2 2
3 3 from csv import DictReader, DictWriter from csv import DictReader, DictWriter
4 4 from os.path import isfile from os.path import isfile
5 from variation_upload import writeCSV
5 6 try: try:
6 7 from sortedcontainers import SortedDict from sortedcontainers import SortedDict
7 8 except ImportError: except ImportError:
 
... ... except ImportError:
9 10 raise ImportError raise ImportError
10 11
11 12
12 def writeCSV(dataobject, name, columns):
13 '''Write Data into new CSV for Upload
14 OUTPUT
15 '''
16
17 output_path_number = 1
18 datatype = ".csv"
19 output_path = "Upload/" + name + "_upload_" + \
20 str(output_path_number) + datatype
21
22 while(isfile(output_path)):
23 output_path_number = int(output_path_number) + 1
24 output_path = "Upload/" + name + "_upload_" + \
25 str(output_path_number) + datatype
26
27 with open(output_path, mode='a') as item:
28 writer = DictWriter(item, delimiter=";", fieldnames=columns)
29 writer.writeheader()
30 for row in dataobject:
31 writer.writerow(dataobject[row])
32
33 if(isfile(output_path)):
34 print("Upload file successfully created under {0}".format(output_path))
35
36 return output_path
37
38
39 13 def stockUpload(flatfile, stocklist): def stockUpload(flatfile, stocklist):
40 14
41 15 # The column header names # The column header names
File product_import.py changed (mode: 100644) (index e7dcd5d..1ce6334)
1 1 from tkinter import Tk from tkinter import Tk
2 2 from tkinter.filedialog import askopenfilename from tkinter.filedialog import askopenfilename
3 3 from sys import exit from sys import exit
4 from packages.item_upload import itemUpload
4 from packages.item_upload import itemUpload, itemPropertyUpload
5 5 # from packages.attribute_upload import attributeUpload # from packages.attribute_upload import attributeUpload
6 6 from packages.variation_upload import variationUpload, setActive, EANUpload, marketConnection from packages.variation_upload import variationUpload, setActive, EANUpload, marketConnection
7 7 from packages.stock_upload import stockUpload, priceUpload from packages.stock_upload import stockUpload, priceUpload
 
... ... def main():
52 52 print("Something went wrong at the Export file import!") print("Something went wrong at the Export file import!")
53 53 print("spreadsheet csv containing the export : ", export) print("spreadsheet csv containing the export : ", export)
54 54 try: try:
55 print("EAN, Active & Price Upload")
55 print("EAN, Active, Merkmale & Price Upload")
56 56 EANUpload(sheet, export) EANUpload(sheet, export)
57 57 setActive(sheet, export) setActive(sheet, export)
58 itemPropertyUpload(sheet, export)
58 59 priceUpload(sheet, export) priceUpload(sheet, export)
59 60 except FileNotFoundError as err: except FileNotFoundError as err:
60 61 print(err) print(err)
Hints:
Before first commit, do not forget to setup your git environment:
git config --global user.name "your_name_here"
git config --global user.email "your@email_here"

Clone this repository using HTTP(S):
git clone https://rocketgit.com/user/initBasti/Amazon2PlentySync

Clone this repository using ssh (do not forget to upload a key first):
git clone ssh://rocketgit@ssh.rocketgit.com/user/initBasti/Amazon2PlentySync

Clone this repository using git:
git clone git://git.rocketgit.com/user/initBasti/Amazon2PlentySync

You are allowed to anonymously push to this repository.
This means that your pushed commits will automatically be transformed into a merge request:
... clone the repository ...
... make some changes and some commits ...
git push origin main