Refactoring

This commit is contained in:
Abdalaziz Rashid 2020-08-05 14:42:45 +03:00
parent 0bf4dc6e9f
commit 28f3d5b140

View file

@@ -1,39 +1,43 @@
import logging
import argparse
import pathlib
import configparser
import os
from threading import active_count
from multiprocessing import Pool
from multiprocessing.pool import ThreadPool
from random import shuffle
from tabulate import tabulate
from pathlib import Path
from Code.S_run_aifeynman import run_aifeynman
from functools import partial
from S_run_aifeynman import run_aifeynman
# Default solver configuration, used when no INI config file is supplied.
_CFG = {
    "dataset_path" : "../Feynman_without_units/",  # directory containing the dataset files
    "operations_file" : "./14ops.txt",             # operations allowed in the brute-force search
    "polynomial_degree" : 3,                       # max degree passed to polyfit_deg
    "number_of_epochs" : 500,                      # NOTE(review): not read in the visible code — confirm it is used elsewhere
    "bruteforce_time" : 60,                        # seconds allotted to the brute-force stage (BF_try_time)
    "test_percentage" : 0,                         # percentage of data held out for testing
}
class RunAll:
"""
Run the solver on all the whole dataset
Run the solver on the whole dataset
"""
def __init__(self, *, cfg_path: Path):
    """Initialise from an INI config file and print results.

    NOTE(review): this is the pre-refactor variant retained by the diff
    rendering; a later ``__init__`` definition in this class supersedes it.

    Args:
        cfg_path: Path to an INI file containing a ``[Default]`` section.
    """
    # Log everything to a fixed file in the working directory.
    logging.basicConfig(filename="output.log", level=logging.DEBUG)
    self.config = configparser.ConfigParser()
    self.config.read(cfg_path)
    # Only the [Default] section is consulted by the rest of the class.
    self.cfg = self.config["Default"]
    self.print_results()
def __init__(self, *, cfg=_CFG):
    """Initialise with an in-memory config mapping and run the solver.

    Args:
        cfg: Mapping of configuration keys (see module-level ``_CFG``).
            NOTE(review): defaulting to the shared ``_CFG`` dict means any
            mutation of ``self.cfg`` leaks across instances — confirm this
            is intended.
    """
    logging.basicConfig(filename="output_no_units_parallel.log", level=logging.DEBUG)
    self.cfg = cfg
    # Maps dataset filename -> solver output (filled by run_solver).
    self.results = {}
    self.run_solver()
def log_results(self):
    """Placeholder for persisting results; currently does nothing."""
    pass
def print_results(self):
table = [
["foo", 696000, 1989100000],
["bar", 6371, 5973.6],
["baz", 1737, 73.5],
["qux", 3390, 641.85],
]
table = []
for file, sol in self.results.items():
table.append(sol[-1])
print(tabulate(
table,
headers=[
@@ -45,12 +49,23 @@ class RunAll:
)
)
def run_solver(self):
def run_solver(self, dirs=None):
if not dirs:
path = Path(self.cfg["dataset_path"])
for child in path.iterdir():
dirs = list(path.iterdir())
shuffle(dirs) # Shuffle to sample a different file each time
else:
path=Path(self.cfg["dataset_path"])
child = dirs
# for child in dirs:
# print(child)
print(f"Process PID: {os.getpid()} ---------------- Number of threads: {active_count()}" )
self.results[str(child).split("/")[-1]] = run_aifeynman(
pathdir="/home/aziz/lambda_lab/AI-Feynman/example_data/",#str(path.resolve()) + "/",
filename="example2.txt",#str(child).split("/")[-1],
pathdir=str(path.resolve()) + "/",
filename=str(child).split("/")[-1],
BF_try_time=int(self.cfg["bruteforce_time"]),
BF_ops_file_type=Path(self.cfg["operations_file"]),
polyfit_deg=int(self.cfg["polynomial_degree"]),
@@ -58,12 +73,43 @@ class RunAll:
vars_name=[],
test_percentage=int(self.cfg["test_percentage"]),
)
logging.info(self.results)
break
print("@"*120)
print("@"*120)
self.print_results()
def get_files(dirs, chunks=5):
    """Split dataset files into fixed-size batches.

    Auxiliary ``*test`` / ``*train`` split files are skipped so only the
    primary dataset files are yielded.

    Args:
        dirs: Iterable of file paths (``pathlib.Path`` or ``str``).
        chunks: Maximum number of files per yielded batch.

    Yields:
        Lists of at most ``chunks`` paths, in input order.
    """
    # Bug fix: the original rebuilt the list from an undefined module-level
    # ``path`` variable (NameError) and silently discarded the ``dirs``
    # argument; use the parameter instead.
    kept = [f for f in dirs if not str(f).endswith(("test", "train"))]
    for i in range(0, len(kept), chunks):
        yield kept[i : i + chunks]
if __name__ == "__main__":
    # NOTE(review): the diff rendering left both the old config-file entry
    # point and the new argparse-based one in place; only the new one is
    # kept here.  The dead commented-out Pool/chunking experiments are
    # removed rather than carried along.
    #
    # Constructing RunAll() uses the dict-based config (_CFG) and binds its
    # run_solver method so a single dataset file can be solved on demand.
    solver = RunAll().run_solver

    parser = argparse.ArgumentParser(description='Solver')
    parser.add_argument('--file', help='Enter file path')
    args = parser.parse_args()

    # Solve just the file named on the command line.
    solver(args.file)