diff --git a/.DS_Store b/.DS_Store
index dc6600646c5fa4ef00941aabeef5bfd0226c9111..baec9d27c833adc893defb9b28fc452512a33b26 100644
Binary files a/.DS_Store and b/.DS_Store differ
diff --git a/.gitignore copy b/.gitignore copy
deleted file mode 100644
index 6dc91d8183037294b495ac1607c0f4e4e248edcc..0000000000000000000000000000000000000000
--- a/.gitignore copy	
+++ /dev/null
@@ -1,157 +0,0 @@
-# Byte-compiled / optimized / DLL files
-__pycache__/
-**/__pycache__/
-**/datasets/
-*.py[cod]
-*$py.class
-*.pyc
-*.pyo
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-wheels/
-share/python-wheels/
-*.egg-info/
-.installed.cfg
-*.egg
-MANIFEST
-
-# PyInstaller
-#  Usually these files are written by a python script from a template
-#  before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.nox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*.cover
-*.py,cover
-.hypothesis/
-.pytest_cache/
-cover/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-db.sqlite3
-db.sqlite3-journal
-
-# Flask stuff:
-instance/
-.webassets-cache
-
-# Scrapy stuff:
-.scrapy
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-.pybuilder/
-target/
-
-# Jupyter Notebook
-.ipynb_checkpoints
-
-# IPython
-profile_default/
-ipython_config.py
-
-# pyenv
-#   For a library or package, you might want to ignore these files since the code is
-#   intended to run in multiple environments; otherwise, check them in:
-# .python-version
-
-# pipenv
-#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
-#   However, in case of collaboration, if having platform-specific dependencies or dependencies
-#   having no cross-platform support, pipenv may install dependencies that don't work, or not
-#   install all needed dependencies.
-#Pipfile.lock
-
-# poetry
-#   Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
-#   This is especially recommended for binary packages to ensure reproducibility, and is more
-#   commonly ignored for libraries.
-#   https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
-#poetry.lock
-
-# PEP 582; used by e.g. github.com/David-OConnor/pyflow
-__pypackages__/
-
-# Celery stuff
-celerybeat-schedule
-celerybeat.pid
-
-# SageMath parsed files
-*.sage.py
-
-# Environments
-.env
-.venv
-env/
-venv/
-ENV/
-env.bak/
-venv.bak/
-
-# Spyder project settings
-.spyderproject
-.spyproject
-
-# Rope project settings
-.ropeproject
-
-# mkdocs documentation
-/site
-
-# mypy
-.mypy_cache/
-.dmypy.json
-dmypy.json
-
-# Pyre type checker
-.pyre/
-
-# pytype static type analyzer
-.pytype/
-
-# Cython debug symbols
-cython_debug/
-
-# PyCharm
-#  JetBrains specific template is maintainted in a separate JetBrains.gitignore that can
-#  be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
-#  and can be added to the global gitignore or merged into this file.  For a more nuclear
-#  option (not recommended) you can uncomment the following to ignore the entire idea folder.
-#.idea/
-MetaAugment/__pycache__/main.cpython-38.pyc
diff --git a/MetaAugment/UCB1_JC.py b/MetaAugment/UCB1_JC.py
index 6c121a539e69cc8d1ac0cf22b66e8fa1d50738a7..1986368aff7f5d42e966f61e0cf17424d0f2fb7e 100644
--- a/MetaAugment/UCB1_JC.py
+++ b/MetaAugment/UCB1_JC.py
@@ -230,20 +230,20 @@ def run_UCB1(policies, batch_size, learning_rate, ds, toy_size, max_epochs, earl
 
         # open data and apply these transformations
         if ds == "MNIST":
-            train_dataset = datasets.MNIST(root='./MetaAugment/train', train=True, download=True, transform=transform)
-            test_dataset = datasets.MNIST(root='./MetaAugment/test', train=False, download=True, transform=transform)
+            train_dataset = datasets.MNIST(root='./MetaAugment/datasets/mnist/train', train=True, download=True, transform=transform)
+            test_dataset = datasets.MNIST(root='./MetaAugment/datasets/mnist/test', train=False, download=True, transform=transform)
         elif ds == "KMNIST":
-            train_dataset = datasets.KMNIST(root='./MetaAugment/train', train=True, download=True, transform=transform)
-            test_dataset = datasets.KMNIST(root='./MetaAugment/test', train=False, download=True, transform=transform)
+            train_dataset = datasets.KMNIST(root='./MetaAugment/datasets/kmnist/train', train=True, download=True, transform=transform)
+            test_dataset = datasets.KMNIST(root='./MetaAugment/datasets/kmnist/test', train=False, download=True, transform=transform)
         elif ds == "FashionMNIST":
-            train_dataset = datasets.FashionMNIST(root='./MetaAugment/train', train=True, download=True, transform=transform)
-            test_dataset = datasets.FashionMNIST(root='./MetaAugment/test', train=False, download=True, transform=transform)
+            train_dataset = datasets.FashionMNIST(root='./MetaAugment/datasets/fashionmnist/train', train=True, download=True, transform=transform)
+            test_dataset = datasets.FashionMNIST(root='./MetaAugment/datasets/fashionmnist/test', train=False, download=True, transform=transform)
         elif ds == "CIFAR10":
-            train_dataset = datasets.CIFAR10(root='./MetaAugment/train', train=True, download=True, transform=transform)
-            test_dataset = datasets.CIFAR10(root='./MetaAugment/test', train=False, download=True, transform=transform)
+            train_dataset = datasets.CIFAR10(root='./MetaAugment/datasets/cifar10/train', train=True, download=True, transform=transform)
+            test_dataset = datasets.CIFAR10(root='./MetaAugment/datasets/cifar10/test', train=False, download=True, transform=transform)
         elif ds == "CIFAR100":
-            train_dataset = datasets.CIFAR100(root='./MetaAugment/train', train=True, download=True, transform=transform)
-            test_dataset = datasets.CIFAR100(root='./MetaAugment/test', train=False, download=True, transform=transform)
+            train_dataset = datasets.CIFAR100(root='./MetaAugment/datasets/cifar100/train', train=True, download=True, transform=transform)
+            test_dataset = datasets.CIFAR100(root='./MetaAugment/datasets/cifar100/test', train=False, download=True, transform=transform)
 
         # check sizes of images
         img_height = len(train_dataset[0][0][0])
diff --git a/auto_augmentation/.DS_Store b/auto_augmentation/.DS_Store
index 423820404538e588d0dd42d7334bc8603eccb1b8..a13c2cd854aa7d796b880950521b62a06b64e87c 100644
Binary files a/auto_augmentation/.DS_Store and b/auto_augmentation/.DS_Store differ
diff --git a/auto_augmentation/progress.py b/auto_augmentation/progress.py
index 77845a0260cf6c2494e8111f69fce2fdbf3124a8..411e8c5551a7580641008f1943c3d67d248f7629 100644
--- a/auto_augmentation/progress.py
+++ b/auto_augmentation/progress.py
@@ -17,7 +17,7 @@ from tqdm import trange
 torch.manual_seed(0)
 # import agents and its functions
 
-from MetaAugment import UCB1_JC_py as UCB1_JC
+from MetaAugment import UCB1_JC as UCB1_JC
 
 
 
diff --git a/auto_augmentation/static/.DS_Store b/auto_augmentation/static/.DS_Store
index cbf9ce2f5606f2ec8e9da4a923b1306d7d64d602..add86eb83dccdad155dff1db2e1c401c2959f67f 100644
Binary files a/auto_augmentation/static/.DS_Store and b/auto_augmentation/static/.DS_Store differ
diff --git a/auto_augmentation/templates/home.html b/auto_augmentation/templates/home.html
index 5bb10ef4f5aedb11da5e16aeb90eed9ee98ca851..99a9ecb50b272d08098c28b1c91499c21f7faa20 100644
--- a/auto_augmentation/templates/home.html
+++ b/auto_augmentation/templates/home.html
@@ -6,8 +6,9 @@
 <h3>Choose your dataset</h3>
 <form action="/user_input">
   <!-- upload dataset -->
-  <label for="dataset_upload">You can upload your dataset here:</label>
-  <input type="file" name="dataset_upload" class="upload"><br><br>
+  <label for="dataset_upload">You can upload your dataset folder here:</label>
+  <!-- <input type="file" name="dataset_upload" class="upload"><br><br> -->
+  <input type="file" name="dataset_upload" webkitdirectory directory multiple /><br><br>
 
   <!-- dataset radio button -->
   Or you can select a dataset from our database: <br>
@@ -34,7 +35,7 @@
 <!-- --------------------------------------------------------------- -->
 
 
-  <h3>Choose the network which the dataset is trained on</h3>
+  <h3>Choose the network the dataset is trained on</h3>
   <!-- upload network -->
   <label for="network_upload">Please upload your network here:</label>
   <input type="file" name="network_upload" class="upload"><br><br>