diff --git a/latentscope/__version__.py b/latentscope/__version__.py
index 7fd229a..fc79d63 100644
--- a/latentscope/__version__.py
+++ b/latentscope/__version__.py
@@ -1 +1 @@
-__version__ = '0.2.0'
+__version__ = '0.2.1'
diff --git a/latentscope/scripts/scope.py b/latentscope/scripts/scope.py
index ffdfa57..420b6c1 100644
--- a/latentscope/scripts/scope.py
+++ b/latentscope/scripts/scope.py
@@ -130,7 +130,9 @@ def get_next_scopes_number(dataset):
             json.dump([], f)
 
     input_df = pd.read_parquet(os.path.join(DATA_DIR, dataset_id, "input.parquet"))
-    combined_df = input_df[input_df.index.isin(scope_parquet['ls_index'])]
+    input_df.reset_index(inplace=True)
+    input_df = input_df[input_df['index'].isin(scope_parquet['ls_index'])]
+    combined_df = input_df.join(scope_parquet.set_index('ls_index'), on='index', rsuffix='_ls')
     combined_df.to_parquet(os.path.join(directory, id + "-input.parquet"))
     print("wrote scope", id)
 
diff --git a/latentscope/server/bulk.py b/latentscope/server/bulk.py
index aadcc7d..10270f9 100644
--- a/latentscope/server/bulk.py
+++ b/latentscope/server/bulk.py
@@ -164,6 +164,7 @@ def delete_rows():
 
 def update_combined(df, dataset_id, scope_id):
     input_df = pd.read_parquet(os.path.join(DATA_DIR, dataset_id, "input.parquet"))
-    combined_df = input_df[input_df.index.isin(df['ls_index'])]
+    input_df.reset_index(inplace=True)
+    input_df = input_df[input_df['index'].isin(df['ls_index'])]
+    combined_df = input_df.join(df.set_index('ls_index'), on='index', rsuffix='_ls')
     combined_df.to_parquet(os.path.join(DATA_DIR, dataset_id, "scopes", scope_id + "-input.parquet"))
-
diff --git a/latentscope/server/jobs.py b/latentscope/server/jobs.py
index 422e9ca..271c098 100644
--- a/latentscope/server/jobs.py
+++ b/latentscope/server/jobs.py
@@ -239,7 +239,8 @@ def delete_embedding():
     job_id = str(uuid.uuid4())
-    command = f'rm -rf "{os.path.join(DATA_DIR, dataset, "embeddings", f"{embedding_id}*")}"'
+    path = os.path.join(DATA_DIR, dataset, "embeddings", f"{embedding_id}*").replace(" ", "\\ ")
+    command = f'rm -rf {path}'
     for umap in umaps_to_delete:
         delete_umap(dataset, umap)
     threading.Thread(target=run_job, args=(dataset, job_id, command)).start()
@@ -284,10 +285,12 @@ def delete_umap(dataset, umap_id):
     job_id = str(uuid.uuid4())
-    command = f'rm -rf "{os.path.join(DATA_DIR, dataset, "umaps", f"{umap_id}*")}"'
+    path = os.path.join(DATA_DIR, dataset, "umaps", f"{umap_id}*").replace(" ", "\\ ")
+    command = f'rm -rf {path}'
     # Create the rm -rf commands from the clusters_to_delete list
     for cluster in clusters_to_delete:
-        command += f'; rm "{os.path.join(DATA_DIR, dataset, "clusters", f"{cluster}*")}"'
+        cpath = os.path.join(DATA_DIR, dataset, "clusters", f"{cluster}*").replace(" ", "\\ ")
+        command += f'; rm -rf {cpath}'
     threading.Thread(target=run_job, args=(dataset, job_id, command)).start()
     return jsonify({"job_id": job_id})
@@ -310,7 +313,8 @@ def delete_cluster():
     dataset = request.args.get('dataset')
     cluster_id = request.args.get('cluster_id')
     job_id = str(uuid.uuid4())
-    command = f'rm -rf "{os.path.join(DATA_DIR, dataset, "clusters", f"{cluster_id}*")}"'
+    path = os.path.join(DATA_DIR, dataset, "clusters", f"{cluster_id}*").replace(" ", "\\ ")
+    command = f'rm -rf {path}'
     threading.Thread(target=run_job, args=(dataset, job_id, command)).start()
     return jsonify({"job_id": job_id})
@@ -354,7 +358,8 @@ def delete_scope():
     scope_id = request.args.get('scope_id')
     job_id = str(uuid.uuid4())
-    command = f'rm -rf "{os.path.join(DATA_DIR, dataset, "scopes", f"{scope_id}*")}"'
+    path = os.path.join(DATA_DIR, dataset, "scopes", f"{scope_id}*").replace(" ", "\\ ")
+    command = f'rm -rf {path}'
     threading.Thread(target=run_job, args=(dataset, job_id, command)).start()
     return jsonify({"job_id": job_id})
diff --git a/web/src/components/HullPlot.jsx b/web/src/components/HullPlot.jsx
index 9c2e0a5..c78b2bb 100644
--- a/web/src/components/HullPlot.jsx
+++ b/web/src/components/HullPlot.jsx
@@ -2,7 +2,7 @@ import { useEffect, useRef } from 'react';
 // import { scaleLinear } from 'd3-scale';
 import { line, curveLinearClosed, curveCatmullRomClosed } from 'd3-shape';
 import { select } from 'd3-selection';
-// import { transition } from 'd3-transition';
+import { transition } from 'd3-transition';
 import { easeExpOut, easeExpIn, easeCubicInOut} from 'd3-ease';
 // import { interpolate } from 'flubber';
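Note on the pandas change in `scope.py` and `bulk.py`: instead of only filtering `input.parquet` by `ls_index`, the combined frame now joins the scope's columns onto the input columns before writing `*-input.parquet`. A minimal sketch of that pattern with hypothetical stand-in frames (the real code reads `input_df` and the scope rows from parquet files):

```python
import pandas as pd

# Hypothetical stand-ins for input.parquet and a scope's rows.
input_df = pd.DataFrame({"text": ["a", "b", "c", "d"]})
scope_df = pd.DataFrame({"ls_index": [0, 2], "cluster": [5, 7]})

# Same pattern as the patched code: expose the row index as a column,
# keep only the rows the scope references, then join the scope columns on.
input_df.reset_index(inplace=True)
input_df = input_df[input_df["index"].isin(scope_df["ls_index"])]
combined_df = input_df.join(scope_df.set_index("ls_index"), on="index", rsuffix="_ls")

print(combined_df)
# roughly:
#    index text  cluster
# 0      0    a        5
# 2      2    c        7
```

The `rsuffix="_ls"` only comes into play when a scope column name collides with an input column; non-colliding columns keep their original names.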
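Note on the `jobs.py` change: the old commands double-quoted the whole path, which stopped the shell from expanding the trailing `*`, so the glob never matched anything. The patch drops the quotes and backslash-escapes spaces instead. A small sketch of that construction with made-up values (`DATA_DIR`, `dataset`, and the id come from settings and the request in the real handlers):

```python
import os

# Hypothetical values for illustration only.
DATA_DIR = "/data/latent scope"
dataset = "my-dataset"
embedding_id = "embedding-001"

# Escape spaces instead of quoting, so the shell still expands the trailing "*".
path = os.path.join(DATA_DIR, dataset, "embeddings", f"{embedding_id}*").replace(" ", "\\ ")
command = f"rm -rf {path}"
print(command)
# rm -rf /data/latent\ scope/my-dataset/embeddings/embedding-001*
```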