diff --git a/.dockerignore b/.dockerignore index 37a0a977b4..0de5cabe87 100644 --- a/.dockerignore +++ b/.dockerignore @@ -8,4 +8,8 @@ coverage .docs .github .pytest_cache -data \ No newline at end of file +data + +client/node_modules +client/.nx +client/dist diff --git a/.docs/source/designsafe.apps.auth.rst b/.docs/source/designsafe.apps.auth.rst index 0d872b14a2..ccdbdd0f2a 100644 --- a/.docs/source/designsafe.apps.auth.rst +++ b/.docs/source/designsafe.apps.auth.rst @@ -28,14 +28,6 @@ designsafe.apps.auth.backends module :undoc-members: :show-inheritance: -designsafe.apps.auth.context_processors module ---------------------------------------------- - -.. automodule:: designsafe.apps.auth.context_processors - :members: - :undoc-members: - :show-inheritance: - designsafe.apps.auth.middleware module -------------------------------------- diff --git a/.flake8 b/.flake8 index d5604d56e3..3c81c28f5d 100644 --- a/.flake8 +++ b/.flake8 @@ -1,7 +1,10 @@ [flake8] # E501: line is too long. # H101: Use TODO(NAME) -ignore = E501, H101 +# W503: line break before binary operator. Ignore as black will break this rule. +ignore = E501, H101, W503 exclude = __pycache__, tests.py, migrations + +extend-ignore = W503 diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index aa17acbf10..75fa1b30e4 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,10 +1,9 @@ name: CI -# Controls when the action will run. Triggers the workflow on push or pull request -# events but only for the master branch +# Controls when the action will run. 
Triggers the workflow on pushes to main or on pull request events on: push: - branches: [ master ] + branches: [ main ] pull_request: branches: [ '**' ] @@ -17,7 +16,7 @@ jobs: - uses: actions/checkout@v4 - name: Fetch base and install Poetry - run: | + run: | git fetch origin ${{github.base_ref}} pipx install poetry @@ -34,6 +33,30 @@ jobs: - run: | poetry install + - name: Run Server-side unit tests and generate coverage report + run: | + poetry run pytest --cov-config=.coveragerc --cov=designsafe --cov-report=xml -ra designsafe + + Server_Side_Linting: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + + - name: Fetch base and install Poetry + run: | + git fetch origin ${{github.base_ref}} + pipx install poetry + + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + cache: 'poetry' + + - name: Install Python Packages + run: | + poetry install + - name: Run Server-side linting with pytest # Only run on new files for now-- for all changes, filter is ACMRTUXB # Check manage.py to prevent a crash if no files are selected. 
@@ -44,16 +67,12 @@ jobs: run: | poetry run black $(git diff --name-only --diff-filter=A origin/${{github.base_ref}} | grep -E "(.py$)") manage.py --check - - name: Run Server-side unit tests and generate coverage report - run: | - poetry run pytest --cov-config=.coveragerc --cov=designsafe --cov-report=xml -ra designsafe - Client_Side_Unit_Tests: runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - name: Setup Node.js for use with actions - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: node-version: 16.x cache: npm @@ -64,12 +83,12 @@ jobs: React_NX_unit_tests: runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Setup Node.js for use with actions - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: node-version: 20 cache: npm @@ -78,14 +97,35 @@ jobs: working-directory: client - uses: nrwl/nx-set-shas@v3 - # Check linting/formatting of workspace files. - - run: npx nx format:check - working-directory: client - # Lint/test/build any apps and libs that have been impacted by the diff. - - run: npx nx affected --target=lint --parallel=3 - working-directory: client + # Test/build any apps and libs that have been impacted by the diff. - run: npx nx affected --target=test --parallel=3 --ci --code-coverage working-directory: client - run: npx nx affected --target=build --parallel=3 working-directory: client + + React_NX_linting: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Setup Node.js for use with actions + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: npm + + - run: npm ci + working-directory: client + + - uses: nrwl/nx-set-shas@v3 + + # Check linting/formatting of workspace files. + - run: npx nx format:check + working-directory: client + + # Lint any apps and libs that have been impacted by the diff. 
+ - run: npx nx affected --target=lint --parallel=3 + working-directory: client diff --git a/.gitignore b/.gitignore index 8bd0b28d09..dbcc6364ed 100644 --- a/.gitignore +++ b/.gitignore @@ -11,6 +11,8 @@ settings.json designsafe/apps/rapid/static/designsafe/apps/rapid/build/bundle.* designsafe/apps/geo/static/designsafe/apps/geo/build/bundle.* designsafe/static/build/ +designsafe/static/react-assets/ +designsafe/templates/react-assets.html # designsafe/static/styles/base.* /static diff --git a/.pylintrc b/.pylintrc index 5a4da2b364..d8f0f80226 100644 --- a/.pylintrc +++ b/.pylintrc @@ -583,7 +583,7 @@ ignored-checks-for-mixins=no-member, # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace +ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace,Tapis # Show a hint with possible names when a member name was not found. The aspect # of finding the hint is based on edit distance. 
diff --git a/Makefile b/Makefile index 98bd8815d6..0104e5c8b2 100644 --- a/Makefile +++ b/Makefile @@ -1,11 +1,23 @@ .PHONY: build build: - docker-compose -f ./conf/docker/docker-compose.yml build + docker compose -f ./conf/docker/docker-compose.yml build + +.PHONY: build-dev +build-dev: + docker compose -f ./conf/docker/docker-compose-dev.yml build .PHONY: start start: - docker-compose -f ./conf/docker/docker-compose-dev.all.debug.yml up + docker compose -f ./conf/docker/docker-compose-dev.all.debug.yml up .PHONY: stop stop: - docker-compose -f ./conf/docker/docker-compose-dev.all.debug.yml down + docker compose -f ./conf/docker/docker-compose-dev.all.debug.yml down + +.PHONY: start-m1 +start-m1: + docker compose -f ./conf/docker/docker-compose-dev.all.debug.m1.yml up + +.PHONY: stop-m1 +stop-m1: + docker compose -f ./conf/docker/docker-compose-dev.all.debug.m1.yml down diff --git a/README.md b/README.md index 7e939da5f1..b2e91f8b7a 100644 --- a/README.md +++ b/README.md @@ -40,34 +40,37 @@ If you are on a Mac or a Windows machine, the recommended method is to install - `AGAVE_*`: should be set to enable Agave API integration (authentication, etc.) - `RT_*`: should be set to enable ticketing - Make copies of [rabbitmq.sample.env](conf/env_files/rabbitmq.sample.env) and [mysql.sample.env](conf/env_files/mysql.sample.env), - then rename them to `rabbitmq.env` and `mysql.env`. - - Make copies of [mysql.sample.cnf](conf/mysql.sample.cnf), [redis.sample.conf](conf/redis.sample.conf), - and [rabbitmq.sample.conf](conf/rabbitmq.sample.conf), then rename them to `mysql.cnf`, `redis.conf`, and `rabbitmq.conf`. + Make a copy of [rabbitmq.sample.env](conf/env_files/rabbitmq.sample.env) + then rename it to `rabbitmq.env`. Make a copy of [external_resource_secrets.sample.py](designsafe/settings/external_resource_secrets.sample.py) and rename it to `external_resource_secrets.py`. -3. Build the containers and frontend package +3. 
Build the containers and frontend packages - ``` - $ make build - ``` - or - ``` - $ docker-compose -f conf/docker/docker-compose.yml build - ``` + 1. Containers: + ```sh + make build-dev + ``` + or + ```sh + docker-compose -f conf/docker/docker-compose-dev.yml build + ``` - These lines install the node packages required for DesignSafe, - and build the frontend package. - ``` - $ npm ci - $ npm run build - ``` + 2. Angular Frontend + static assets: + ```sh + npm ci + docker run -v `pwd`:`pwd` -w `pwd` -it node:16 /bin/bash -c "npm run build" + ``` - If you are working with the frontend code and want it to automatically update, - use `npm run dev` rather than `npm run build` to have it build upon saving the file. + **Note:** If you are working with the frontend code and want it to automatically update, use `npm run dev` rather than `npm run build` to have it build upon saving the file. + + 3. React Frontend (in another terminal): + ```sh + cd client + npm ci + npm run start + ``` 4. Start local containers @@ -82,7 +85,7 @@ If you are on a Mac or a Windows machine, the recommended method is to install ``` $ docker exec -it des_django bash $ ./manage.py migrate - $ ./manage.py collectstatic -i demo + $ ./manage.py collectstatic --ignore demo --no-input $ ./manage.py createsuperuser ``` @@ -225,8 +228,8 @@ $ docker-compose -f conf/docker/docker-compose-dev.all.debug.yml up $ npm run dev ``` -When using this compose file, your Agave Client should be configured with a `callback_url` -of `http://$DOCKER_HOST_IP:8000/auth/agave/callback/`. +When using this compose file, your Tapis Client should be configured with a `callback_url` +of `http://$DOCKER_HOST_IP:8000/auth/tapis/callback/`. For developing some services, e.g. Box.com integration, https support is required. 
To enable an Nginx http proxy run using the [`docker-compose-http.yml`](docker-compose-http.yml) @@ -238,9 +241,6 @@ $ docker-compose -f docker-compose-http.yml build $ docker-compose -f docker-compose-http.yml up ``` -When using this compose file, your Agave Client should be configured with a `callback_url` -of `https://$DOCKER_HOST_IP/auth/agave/callback/`. - ### Agave filesystem setup 1. Delete all of the old metadata objects using this command: diff --git a/bin/build_client.sh b/bin/build_client.sh deleted file mode 100644 index 90421c977e..0000000000 --- a/bin/build_client.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -x -if [ "$1" == false ] ; then - cd /srv/www/designsafe - npm ci && npm run build -fi diff --git a/bin/dumpdata.sh b/bin/dumpdata.sh deleted file mode 100755 index f9bc38c80c..0000000000 --- a/bin/dumpdata.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash - -python /portal/manage.py dumpdata \ - --natural-foreign --natural-primary \ - --exclude=cmsplugin_cascade.Segmentation \ - --exclude=admin.logentry \ - --exclude=cms.pageusergroup > /datadump/datadump-`date +%Y%m%d`.json diff --git a/bin/loaddata.sh b/bin/loaddata.sh deleted file mode 100755 index 828939f56e..0000000000 --- a/bin/loaddata.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env bash - -DATE=`date +%Y%m%d` -echo "Flushing current database..." -python /portal/manage.py flush --no-initial-data -echo "Loading data from file datadump-`date +%Y%m%d`.json..." -python /portal/manage.py loaddata /datadump/datadump-${DATE}.json -echo "Copying db.sqlite3 out of container..." -cp db.sqlite3 /datadump/db-${DATE}.sqlite3 -echo "Done!" 
diff --git a/bin/mysql.sh b/bin/mysql.sh deleted file mode 100755 index f1f641af19..0000000000 --- a/bin/mysql.sh +++ /dev/null @@ -1 +0,0 @@ -#!/usr/bin/env bash diff --git a/bin/run-celery-debug.sh b/bin/run-celery-debug.sh index 55602a0982..3e2d894794 100755 --- a/bin/run-celery-debug.sh +++ b/bin/run-celery-debug.sh @@ -5,4 +5,4 @@ # celery -A designsafe beat -l info --pidfile= --schedule=/tmp/celerybeat-schedule & celery -A designsafe worker -l info --autoscale=15,5 -Q indexing,files -n designsafe_worker01 & -celery -A designsafe worker -l info --autoscale=10,3 -Q default,api -n designsafe_worker02 \ No newline at end of file +celery -A designsafe worker -l info --autoscale=10,3 -Q default,api,onboarding -n designsafe_worker02 diff --git a/bin/run-celery-dev.sh b/bin/run-celery-dev.sh deleted file mode 100755 index dc7f4678e5..0000000000 --- a/bin/run-celery-dev.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash - -# Run Celery as the DesignSafe Community Account -celery -A designsafe beat -l info --pidfile= --schedule=/tmp/celerybeat-schedule & -celery -A designsafe worker -l info --autoscale=15,5 -Q indexing,files & -celery -A designsafe worker -l info --autoscale=10,3 -Q default,api" \ No newline at end of file diff --git a/bin/run-celery.sh b/bin/run-celery.sh index 092e3eb671..1544da1f74 100755 --- a/bin/run-celery.sh +++ b/bin/run-celery.sh @@ -3,4 +3,4 @@ # Run Celery as the DesignSafe Community Account celery -A designsafe beat -l info --pidfile= --schedule=/tmp/celerybeat-schedule & celery -A designsafe worker -l info --autoscale=15,5 -Q indexing,files -n designsafe_worker01 & -celery -A designsafe worker -l info --autoscale=10,3 -Q default,api -n designsafe_worker02 \ No newline at end of file +celery -A designsafe worker -l info --autoscale=10,3 -Q default,api,onboarding -n designsafe_worker02 diff --git a/bin/run-django.sh b/bin/run-django.sh deleted file mode 100755 index 25fca63107..0000000000 --- a/bin/run-django.sh +++ /dev/null @@ -1,3 +0,0 
@@ -#!/usr/bin/env bash -# run django dev server as designsafe community account -python -m debugpy --listen 0.0.0.0:5678 manage.py runserver 0.0.0.0:8000 diff --git a/bin/run-flower.sh b/bin/run-flower.sh deleted file mode 100755 index f869d64714..0000000000 --- a/bin/run-flower.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash - -## -# Run Flower monitor UI -# -su tg458981 -c "flower -A designsafe proj --broker=$FLOWER_BROKER --broker_api=$FLOWER_BROKER_API" diff --git a/bin/run-tests.sh b/bin/run-tests.sh deleted file mode 100755 index 5ad58bcf9b..0000000000 --- a/bin/run-tests.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -### -# Run front-end tests -### - -# Start Xvfb -test -e /tmp/.X99-lock -/usr/bin/Xvfb :99 & -xvfb=$! - -export DISPLAY=:99.0 -export CHROME_BIN=/usr/bin/chromium-browser - -/portal/node_modules/.bin/karma start /portal/karma.conf.js --single-run - -kill -TERM $xvfb -wait $xvfb diff --git a/bin/run-uwsgi.sh b/bin/run-uwsgi.sh deleted file mode 100755 index 8b10fecd74..0000000000 --- a/bin/run-uwsgi.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash - -/usr/local/bin/uwsgi --ini /portal/conf/uwsgi_websocket.ini diff --git a/client/modules/datafiles/src/DatafilesBreadcrumb/DatafilesBreadcrumb.module.css b/client/modules/_common_components/src/datafiles/DatafilesBreadcrumb/DatafilesBreadcrumb.module.css similarity index 100% rename from client/modules/datafiles/src/DatafilesBreadcrumb/DatafilesBreadcrumb.module.css rename to client/modules/_common_components/src/datafiles/DatafilesBreadcrumb/DatafilesBreadcrumb.module.css diff --git a/client/modules/datafiles/src/DatafilesBreadcrumb/DatafilesBreadcrumb.tsx b/client/modules/_common_components/src/datafiles/DatafilesBreadcrumb/DatafilesBreadcrumb.tsx similarity index 64% rename from client/modules/datafiles/src/DatafilesBreadcrumb/DatafilesBreadcrumb.tsx rename to client/modules/_common_components/src/datafiles/DatafilesBreadcrumb/DatafilesBreadcrumb.tsx index dda77dfcb4..50c7a922db 100644 --- 
a/client/modules/datafiles/src/DatafilesBreadcrumb/DatafilesBreadcrumb.tsx +++ b/client/modules/_common_components/src/datafiles/DatafilesBreadcrumb/DatafilesBreadcrumb.tsx @@ -6,27 +6,18 @@ import { getSystemRootDisplayName, useAuthenticatedUser } from '@client/hooks'; function getPathRoutes( baseRoute: string, path: string = '', - systemRoot: string = '', - systemRootAlias?: string + relativeTo: string = '' ) { - const pathComponents = decodeURIComponent(path.replace(systemRoot, '')) + const pathComponents = path + .replace(relativeTo, '') .split('/') .filter((p) => !!p); - - const systemRootBreadcrumb = { - path: `${baseRoute}/${systemRoot}`, - title: systemRootAlias ?? 'Data Files', - }; - - return [ - systemRootBreadcrumb, - ...pathComponents.map((comp, i) => ({ - title: comp, - path: `${baseRoute}/${systemRoot}${encodeURIComponent( - '/' + pathComponents.slice(0, i + 1).join('/') - )}`, - })), - ]; + return pathComponents.map((comp, i) => ({ + title: comp, + path: `${baseRoute}/${encodeURIComponent(relativeTo)}${encodeURIComponent( + '/' + pathComponents.slice(0, i + 1).join('/') + )}`, + })); } export const DatafilesBreadcrumb: React.FC< @@ -36,7 +27,6 @@ export const DatafilesBreadcrumb: React.FC< baseRoute: string; systemRoot: string; systemRootAlias?: string; - skipBreadcrumbs?: number; // Number of path elements to skip when generating breadcrumbs } & BreadcrumbProps > = ({ initialBreadcrumbs, @@ -44,14 +34,11 @@ export const DatafilesBreadcrumb: React.FC< baseRoute, systemRoot, systemRootAlias, - skipBreadcrumbs, ...props }) => { const breadcrumbItems = [ ...initialBreadcrumbs, - ...getPathRoutes(baseRoute, path, systemRoot, systemRootAlias).slice( - skipBreadcrumbs ?? 
0 - ), + ...getPathRoutes(baseRoute, path, systemRoot), ]; return ( @@ -77,6 +64,7 @@ export const BaseFileListingBreadcrumb: React.FC< path: string; systemRootAlias?: string; initialBreadcrumbs?: { title: string; path: string }[]; + systemLabel?: string; } & BreadcrumbProps > = ({ api, @@ -84,18 +72,25 @@ export const BaseFileListingBreadcrumb: React.FC< path, systemRootAlias, initialBreadcrumbs = [], + systemLabel, ...props }) => { const { user } = useAuthenticatedUser(); - + const rootAlias = + systemRootAlias || getSystemRootDisplayName(api, system, systemLabel); + const systemRoot = isUserHomeSystem(system) ? '/' + user?.username : ''; return ( diff --git a/client/modules/datafiles/src/FileListing/FileListingTable/FileListingTable.module.css b/client/modules/_common_components/src/datafiles/FileListingTable/FileListingTable.module.css similarity index 100% rename from client/modules/datafiles/src/FileListing/FileListingTable/FileListingTable.module.css rename to client/modules/_common_components/src/datafiles/FileListingTable/FileListingTable.module.css diff --git a/client/modules/_common_components/src/datafiles/FileListingTable/FileListingTable.tsx b/client/modules/_common_components/src/datafiles/FileListingTable/FileListingTable.tsx new file mode 100644 index 0000000000..668b752d88 --- /dev/null +++ b/client/modules/_common_components/src/datafiles/FileListingTable/FileListingTable.tsx @@ -0,0 +1,216 @@ +import React, { useCallback, useEffect, useMemo, useState } from 'react'; +import styles from './FileListingTable.module.css'; +import { Alert, Table, TableColumnType, TableProps } from 'antd'; +import { + useFileListing, + TFileListing, + useSelectedFiles, + useDoiContext, +} from '@client/hooks'; +import { FileListingTableCheckbox } from './FileListingTableCheckbox'; +import parse from 'html-react-parser'; + +type TableRef = { + nativeElement: HTMLDivElement; + scrollTo: (config: { index?: number; key?: React.Key; top?: number }) => void; +}; + +export 
type TFileListingColumns = (TableColumnType & { + dataIndex: keyof TFileListing; +})[]; + +export const FileListingTable: React.FC< + { + api: string; + system?: string; + path?: string; + scheme?: string; + columns: TFileListingColumns; + filterFn?: (listing: TFileListing[]) => TFileListing[]; + disabled?: boolean; + className?: string; + emptyListingDisplay?: React.ReactNode; + noSelection?: boolean; + searchTerm?: string | null; + currentDisplayPath?: TFileListing | undefined; + } & Omit +> = ({ + api, + system, + path = '', + scheme = 'private', + filterFn, + columns, + disabled = false, + className, + emptyListingDisplay, + searchTerm = '', + noSelection, + currentDisplayPath = null, + ...props +}) => { + const limit = 100; + const [scrollElement, setScrollElement] = useState( + undefined + ); + + /* FETCH FILE LISTINGS */ + const { + data, + isLoading, + error, + isFetchingNextPage, + hasNextPage, + fetchNextPage, + } = useFileListing({ + api, + system: system ?? '-', + path: path ?? '', + scheme, + disabled, + searchTerm, + pageSize: limit, + }); + + const combinedListing = useMemo(() => { + const cl: TFileListing[] = []; + data?.pages.forEach((page) => cl.push(...page.listing)); + if (filterFn) { + return filterFn(cl); + } + if (currentDisplayPath) { + return [currentDisplayPath, ...cl]; + } + + return cl; + }, [data, filterFn, currentDisplayPath]); + + /* HANDLE FILE SELECTION */ + const doi = useDoiContext(); + const { selectedFiles, setSelectedFiles } = useSelectedFiles( + api, + system ?? '-', + path + ); + const onSelectionChange = useCallback( + (_: React.Key[], selection: TFileListing[]) => { + setSelectedFiles(doi ? 
selection.map((s) => ({ ...s, doi })) : selection); + }, + [setSelectedFiles, doi] + ); + const selectedRowKeys = useMemo( + () => selectedFiles.map((s) => s.path), + [selectedFiles] + ); + + /* HANDLE INFINITE SCROLL */ + const scrollRefCallback = useCallback( + (node: TableRef) => { + if (node !== null) { + const lastRow = node.nativeElement.querySelectorAll( + '.ant-table-row:last-child' + )[0]; + setScrollElement(lastRow); + } + }, + [setScrollElement] + ); + useEffect(() => { + // Set and clean up scroll event listener on the table ref. + const observer = new IntersectionObserver((entries) => { + // Fetch the next page when the final listing item enters the viewport. + entries.forEach((entry) => { + if ( + entry.isIntersecting && + hasNextPage && + !(isFetchingNextPage || isLoading) + ) { + fetchNextPage(); + } + }); + }); + scrollElement && observer.observe(scrollElement); + + return () => { + observer.disconnect(); + }; + }, [ + scrollElement, + hasNextPage, + fetchNextPage, + isFetchingNextPage, + isLoading, + ]); + + /* RENDER THE TABLE */ + return ( + 0 ? 'table--pull-spinner-bottom' : '' + } ${className ?? ''}`} + rowSelection={ + noSelection + ? undefined + : { + type: 'checkbox', + onChange: onSelectionChange, + selectedRowKeys, + renderCell: (checked, _rc, _idx, node) => ( + + ), + } + } + scroll={{ y: '100%', x: '1000px' }} // set to undefined to disable sticky header + columns={columns} + rowKey={(record) => record.path} + dataSource={combinedListing} + pagination={false} + loading={isLoading || isFetchingNextPage} + locale={{ + emptyText: + isLoading || isFetchingNextPage ? ( +
 
+ ) : ( + <> + {error && ( + + {parse(error.response?.data.message ?? '')} + {system?.includes('project') && ( +
+ + If this is a newly created project, it may take a + few minutes for file system permissions to + propagate. + +
+ )} + + } + /> + )} + {!error && ( + + )} + + ), + }} + {...props} + > + placeholder +
+ ); +}; diff --git a/client/modules/datafiles/src/FileListing/FileListingTable/FileListingTableCheckbox.tsx b/client/modules/_common_components/src/datafiles/FileListingTable/FileListingTableCheckbox.tsx similarity index 100% rename from client/modules/datafiles/src/FileListing/FileListingTable/FileListingTableCheckbox.tsx rename to client/modules/_common_components/src/datafiles/FileListingTable/FileListingTableCheckbox.tsx diff --git a/client/modules/_common_components/src/datafiles/FileTypeIcon/FileTypeIcon.tsx b/client/modules/_common_components/src/datafiles/FileTypeIcon/FileTypeIcon.tsx new file mode 100644 index 0000000000..cf017b3339 --- /dev/null +++ b/client/modules/_common_components/src/datafiles/FileTypeIcon/FileTypeIcon.tsx @@ -0,0 +1,62 @@ +function icon(name: string, type?: string) { + if (type === 'dir' || type === 'folder') { + return 'fa-folder'; + } + const ext = (name.split('.').pop() ?? '').toLowerCase(); + + switch (ext) { + case 'zip': + case 'tar': + case 'gz': + case 'bz2': + return 'fa-file-archive-o'; + case 'png': + case 'jpg': + case 'jpeg': + case 'gif': + case 'tif': + case 'tiff': + return 'fa-file-image-o'; + case 'pdf': + return 'fa-file-pdf-o'; + case 'doc': + case 'docx': + return 'fa-file-word-o'; + case 'xls': + case 'xlsx': + return 'fa-file-excel-o'; + case 'ppt': + case 'pptx': + return 'fa-file-powerpoint-o'; + case 'ogg': + case 'webm': + case 'mp4': + return 'fa-file-video-o'; + case 'mp3': + case 'wav': + return 'fa-file-audio-o'; + case 'txt': + case 'out': + case 'err': + return 'fa-file-text-o'; + case 'tcl': + case 'sh': + case 'json': + return 'fa-file-code-o'; + case 'geojson': + case 'kml': + case 'kmz': + return 'fa-map-o'; + default: + return 'fa-file-o'; + } +} + +export const FileTypeIcon: React.FC<{ name: string; type?: string }> = ({ + name, + type, +}) => { + const iconClassName = icon(name, type); + const className = `fa ${iconClassName}`; + return ; +}; diff --git 
a/client/modules/_common_components/src/datafiles/fileUtils.ts b/client/modules/_common_components/src/datafiles/fileUtils.ts new file mode 100644 index 0000000000..be932895a8 --- /dev/null +++ b/client/modules/_common_components/src/datafiles/fileUtils.ts @@ -0,0 +1,9 @@ +export function toBytes(bytes?: number) { + if (bytes === 0) return '0 bytes'; + if (!bytes) return '-'; + const units = ['bytes', 'kB', 'MB', 'GB', 'TB', 'PB']; + const orderOfMagnitude = Math.floor(Math.log(bytes) / Math.log(1024)); + const precision = orderOfMagnitude === 0 ? 0 : 1; + const bytesInUnits = bytes / Math.pow(1024, orderOfMagnitude); + return `${bytesInUnits.toFixed(precision)} ${units[orderOfMagnitude]}`; +} diff --git a/client/modules/_common_components/src/datafiles/index.ts b/client/modules/_common_components/src/datafiles/index.ts new file mode 100644 index 0000000000..c0114b5b1f --- /dev/null +++ b/client/modules/_common_components/src/datafiles/index.ts @@ -0,0 +1,4 @@ +export * from './FileListingTable/FileListingTable'; +export * from './DatafilesBreadcrumb/DatafilesBreadcrumb'; +export * from './FileTypeIcon/FileTypeIcon'; +export { toBytes } from './fileUtils'; diff --git a/client/modules/_common_components/src/index.ts b/client/modules/_common_components/src/index.ts index e84894599d..f93d99586e 100644 --- a/client/modules/_common_components/src/index.ts +++ b/client/modules/_common_components/src/index.ts @@ -1 +1,4 @@ -export * from './lib/common-components'; +export * from './datafiles'; +export { PrimaryButton, SecondaryButton } from './lib/Button'; +export { Icon } from './lib/Icon'; +export { Spinner } from './lib/Spinner'; diff --git a/client/modules/_common_components/src/lib/Button/Button.module.css b/client/modules/_common_components/src/lib/Button/Button.module.css new file mode 100644 index 0000000000..85dc4e27fc --- /dev/null +++ b/client/modules/_common_components/src/lib/Button/Button.module.css @@ -0,0 +1,3 @@ +.root { + min-width: 100px; +} diff --git 
a/client/modules/_common_components/src/lib/Button/Button.tsx b/client/modules/_common_components/src/lib/Button/Button.tsx new file mode 100644 index 0000000000..1ada660abe --- /dev/null +++ b/client/modules/_common_components/src/lib/Button/Button.tsx @@ -0,0 +1,49 @@ +import React from 'react'; +import { Button, ButtonProps, ConfigProvider, ThemeConfig } from 'antd'; +import styles from './Button.module.css'; + +const secondaryTheme: ThemeConfig = { + components: { + Button: { + defaultActiveBg: '#f4f4f4', + defaultActiveColor: '#222', + defaultActiveBorderColor: '#026', + defaultBg: '#f4f4f4', + defaultBorderColor: '#222222', + defaultColor: '#222222', + defaultHoverBg: '#aac7ff', + }, + }, +}; + +export const SecondaryButton: React.FC = (props) => { + return ( + + - - )} + ); }; diff --git a/client/modules/datafiles/src/DatafilesHelpDropdown/DatafilesHelpDropdown.module.css b/client/modules/datafiles/src/DatafilesHelpDropdown/DatafilesHelpDropdown.module.css new file mode 100644 index 0000000000..4816f75235 --- /dev/null +++ b/client/modules/datafiles/src/DatafilesHelpDropdown/DatafilesHelpDropdown.module.css @@ -0,0 +1,10 @@ +.datafilesHelp div { + text-align: center; + line-height: 20px; +} + +.datafilesHelp a { + text-decoration: none; + color: unset; + white-space: nowrap; +} diff --git a/client/modules/datafiles/src/DatafilesHelpDropdown/DatafilesHelpDropdown.tsx b/client/modules/datafiles/src/DatafilesHelpDropdown/DatafilesHelpDropdown.tsx new file mode 100644 index 0000000000..8ee66a2a83 --- /dev/null +++ b/client/modules/datafiles/src/DatafilesHelpDropdown/DatafilesHelpDropdown.tsx @@ -0,0 +1,130 @@ +import { Button, Dropdown } from 'antd'; +import React from 'react'; +import styles from './DatafilesHelpDropdown.module.css'; + +const items = [ + { + label: ( + +
Curation Tutorials
+
+ ), + key: '1', + }, + { + label: ( + +
Curation Guidelines
+
+ ), + key: '2', + }, + { + label: ( + +
+ Learn About the
+ Data Depot +
+
+ ), + key: '3', + }, + { + label: ( + +
Data Transfer Guide
+
+ ), + key: '4', + }, + { + label: ( + +
Curation FAQ
+
+ ), + key: '5', + }, + { + label: ( + +
+ How to Acknowledge
+ DesignSafe-CI +
+
+ ), + key: '6', + }, + { + label: ( + +
Data Usage Agreement
+
+ ), + key: '7', + }, + { + label: ( + +
FAQ
+
+ ), + key: '8', + }, +]; + +export const DatafilesHelpDropdown: React.FC = () => { + return ( + + + + ); +}; diff --git a/client/modules/datafiles/src/DatafilesModal/CopyModal/CopyModal.tsx b/client/modules/datafiles/src/DatafilesModal/CopyModal/CopyModal.tsx index 95fae57d4d..5afa8d33a1 100644 --- a/client/modules/datafiles/src/DatafilesModal/CopyModal/CopyModal.tsx +++ b/client/modules/datafiles/src/DatafilesModal/CopyModal/CopyModal.tsx @@ -2,16 +2,17 @@ import React, { useCallback, useEffect, useMemo, useState } from 'react'; import { TModalChildren } from '../DatafilesModal'; import { Button, Modal, Select, Table } from 'antd'; import { + TFileListing, useAuthenticatedUser, useFileCopy, usePathDisplayName, - useSelectedFiles, } from '@client/hooks'; import { FileListingTable, + FileTypeIcon, TFileListingColumns, -} from '../../FileListing/FileListingTable/FileListingTable'; -import { BaseFileListingBreadcrumb } from '../../DatafilesBreadcrumb/DatafilesBreadcrumb'; +} from '@client/common-components'; +import { BaseFileListingBreadcrumb } from '@client/common-components'; import styles from './CopyModal.module.css'; import { toBytes } from '../../FileListing/FileListing'; import { CopyModalProjectListing } from './CopyModalProjectListing'; @@ -20,6 +21,12 @@ const SelectedFilesColumns: TFileListingColumns = [ { title: 'Files/Folders to Copy', dataIndex: 'name', + render: (value, record) => ( + + +   {value} + + ), }, { title: , @@ -106,14 +113,14 @@ export const CopyModal: React.FC<{ api: string; system: string; path: string; + selectedFiles: TFileListing[]; children: TModalChildren; -}> = ({ api, system, path, children }) => { +}> = ({ api, system, path, selectedFiles, children }) => { const [isModalOpen, setIsModalOpen] = useState(false); const showModal = () => setIsModalOpen(true); const handleClose = () => setIsModalOpen(false); - const { selectedFiles } = useSelectedFiles(api, system, path); const { user } = useAuthenticatedUser(); const 
defaultDestParams = useMemo( @@ -190,6 +197,7 @@ export const CopyModal: React.FC<{ mutate({ src: { api, system, path: encodeURIComponent(f.path) }, dest: { api: destApi, system: destSystem, path: dPath }, + doi: f.doi, }) ); handleClose(); @@ -256,7 +264,7 @@ export const CopyModal: React.FC<{ f.type === 'dir') } scroll={undefined} + emptyListingDisplay="No folders to display." /> diff --git a/client/modules/datafiles/src/DatafilesModal/DatafilesModal.tsx b/client/modules/datafiles/src/DatafilesModal/DatafilesModal.tsx index 0a250a2ee3..30458b5ca0 100644 --- a/client/modules/datafiles/src/DatafilesModal/DatafilesModal.tsx +++ b/client/modules/datafiles/src/DatafilesModal/DatafilesModal.tsx @@ -5,6 +5,8 @@ import { RenameModal } from './RenameModal'; import { NewFolderModal } from './NewFolderModal'; import { UploadFileModal } from './UploadFileModal'; import { UploadFolderModal } from './UploadFolderModal'; +import { MoveModal } from './MoveModal'; +import { DownloadModal } from './DownloadModal'; export type TModalChildren = (props: { onClick: React.MouseEventHandler; @@ -18,5 +20,7 @@ DatafilesModal.Rename = RenameModal; DatafilesModal.NewFolder = NewFolderModal; DatafilesModal.UploadFile = UploadFileModal; DatafilesModal.UploadFolder = UploadFolderModal; +DatafilesModal.Move = MoveModal; +DatafilesModal.Download = DownloadModal; export default DatafilesModal; diff --git a/client/modules/datafiles/src/DatafilesModal/DownloadModal/DownloadModal.tsx b/client/modules/datafiles/src/DatafilesModal/DownloadModal/DownloadModal.tsx new file mode 100644 index 0000000000..6be4e00da0 --- /dev/null +++ b/client/modules/datafiles/src/DatafilesModal/DownloadModal/DownloadModal.tsx @@ -0,0 +1,75 @@ +import { Modal } from 'antd'; +import React, { useState } from 'react'; +import { TModalChildren } from '../DatafilesModal'; +import { TFileListing, apiClient } from '@client/hooks'; + +export const DownloadModal: React.FC<{ + api: string; + system: string; + scheme?: string; + 
selectedFiles: TFileListing[]; + children: TModalChildren; +}> = ({ api, system, scheme, selectedFiles, children }) => { + const [isModalOpen, setIsModalOpen] = useState(false); + + const doiArray = selectedFiles.filter((f) => f.doi).map((f) => f.doi); + + const doiString = [...new Set(doiArray)].join(','); + + const showModal = () => { + setIsModalOpen(true); + }; + + const zipUrl = `/api/datafiles/${api}/${ + scheme ?? 'public' + }/download/${system}/?doi=${doiString}`; + + const handleDownload = () => { + apiClient + .put(zipUrl, { paths: selectedFiles.map((f) => f.path) }) + .then((resp) => { + const link = document.createElement('a'); + link.style.display = 'none'; + link.setAttribute('href', resp.data.href); + link.setAttribute('download', 'null'); + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + }) + .catch((e) => { + console.log(e); + if (e.response.status === 413) { + showModal(); + } + }); + }; + + const handleCancel = () => { + setIsModalOpen(false); + }; + + return ( + <> + {React.createElement(children, { onClick: handleDownload })} + Data Transfer Help} + onCancel={handleCancel} + cancelButtonProps={{ hidden: true }} + onOk={handleCancel} + > +

+ The data set that you are attempting to download is too large for a + direct download. Direct downloads are supported for up to 2 gigabytes + of data at a time. Alternative approaches for transferring large + amounts of data are provided in the Large Data Transfer Methods + section of the Data Transfer Guide ( + + https://www.designsafe-ci.org/rw/user-guides/data-transfer-guide/ + + ). +

+
+ + ); +}; diff --git a/client/modules/datafiles/src/DatafilesModal/DownloadModal/index.ts b/client/modules/datafiles/src/DatafilesModal/DownloadModal/index.ts new file mode 100644 index 0000000000..11db946700 --- /dev/null +++ b/client/modules/datafiles/src/DatafilesModal/DownloadModal/index.ts @@ -0,0 +1 @@ +export * from './DownloadModal'; diff --git a/client/modules/datafiles/src/DatafilesModal/MoveModal/MoveModal.module.css b/client/modules/datafiles/src/DatafilesModal/MoveModal/MoveModal.module.css new file mode 100644 index 0000000000..5ceac66cca --- /dev/null +++ b/client/modules/datafiles/src/DatafilesModal/MoveModal/MoveModal.module.css @@ -0,0 +1,41 @@ +.copyModalContent { + display: flex; + max-height: 60vh; + min-height: 400px; + gap: 50px; +} + +.copyModalContent td { + vertical-align: middle; +} + +.srcFilesSection { + flex: 1; + overflow: auto; + border: 1px solid #707070; +} + +.srcFilesTable { + height: 100%; +} + +.modalRightPanel { + display: flex; + flex: 1; + flex-direction: column; +} + +.destFilesSection { + display: flex; + flex: 1; + flex-direction: column; + overflow: auto; + border: 1px solid #707070; +} + +.destFilesTableContainer { + display: flex; + flex: 1; + flex-direction: column; + overflow: auto; +} diff --git a/client/modules/datafiles/src/DatafilesModal/MoveModal/MoveModal.tsx b/client/modules/datafiles/src/DatafilesModal/MoveModal/MoveModal.tsx new file mode 100644 index 0000000000..1bad6efdd3 --- /dev/null +++ b/client/modules/datafiles/src/DatafilesModal/MoveModal/MoveModal.tsx @@ -0,0 +1,285 @@ +import React, { useCallback, useEffect, useMemo, useState } from 'react'; +import { TModalChildren } from '../DatafilesModal'; +import { Alert, Button, Modal, Table } from 'antd'; +import { + TFileListing, + useCheckFilesForAssociation, + useFileMove, + usePathDisplayName, +} from '@client/hooks'; + +import { + BaseFileListingBreadcrumb, + FileTypeIcon, +} from '@client/common-components'; +import styles from 
'./MoveModal.module.css'; +import { toBytes } from '../../FileListing/FileListing'; +import { + FileListingTable, + TFileListingColumns, +} from '@client/common-components'; +import { useParams } from 'react-router-dom'; + +const SelectedFilesColumns: TFileListingColumns = [ + { + title: 'Files/Folders to Move', + dataIndex: 'name', + render: (value, record) => ( + + +   {value} + + ), + }, + { + title: , + dataIndex: 'length', + render: (value) => toBytes(value), + }, +]; + +const DestHeaderTitle: React.FC<{ + api: string; + system: string; + path: string; + projectId?: string; +}> = ({ api, system, path, projectId }) => { + const getPathName = usePathDisplayName(); + return ( + + +    + + {projectId || getPathName(api, system, path)} + + ); +}; + +function getDestFilesColumns( + api: string, + system: string, + path: string, + mutationCallback: (path: string) => void, + navCallback: (path: string) => void, + projectId?: string, + disabled?: boolean +): TFileListingColumns { + return [ + { + title: ( + + ), + dataIndex: 'name', + ellipsis: true, + + render: (data, record) => ( + + ), + }, + { + dataIndex: 'path', + align: 'end', + title: ( + + ), + render: (_, record) => ( + + ), + }, + ]; +} + +export const MoveModal: React.FC<{ + api: string; + system: string; + path: string; + selectedFiles: TFileListing[]; + successCallback?: CallableFunction; + children: TModalChildren; +}> = ({ api, system, path, selectedFiles, successCallback, children }) => { + const [isModalOpen, setIsModalOpen] = useState(false); + + const showModal = () => setIsModalOpen(true); + const handleClose = () => setIsModalOpen(false); + + let { projectId } = useParams(); + if (!projectId) projectId = ''; + + const hasAssociations = useCheckFilesForAssociation( + projectId, + selectedFiles.map((f) => f.path) + ); + + const defaultDestParams = useMemo( + () => ({ + destApi: api, + destSystem: system, + destPath: path, + destProjectId: projectId, + }), + [api, system, path, projectId] + ); + + 
const [dest, setDest] = useState<{ + destApi: string; + destSystem: string; + destPath: string; + destProjectId?: string; + }>(defaultDestParams); + const { destApi, destSystem, destPath } = dest; + useEffect(() => setDest(defaultDestParams), [isModalOpen, defaultDestParams]); + + const navCallback = useCallback( + (path: string) => { + const newPath = path.split('/').slice(-1)[0]; + setDest({ ...dest, destPath: newPath }); + }, + [dest] + ); + const { mutate } = useFileMove(); + + const mutateCallback = useCallback( + (dPath: string) => { + selectedFiles.forEach((f) => + mutate( + { + src: { api, system, path: encodeURIComponent(f.path) }, + dest: { api: destApi, system: destSystem, path: dPath }, + }, + { onSuccess: () => successCallback && successCallback() } + ) + ); + handleClose(); + }, + [selectedFiles, mutate, destApi, destSystem, successCallback, api, system] + ); + + const DestFilesColumns = useMemo( + () => + getDestFilesColumns( + destApi, + destSystem, + destPath, + (dPath: string) => mutateCallback(dPath), + navCallback, + dest.destProjectId, + hasAssociations + ), + [ + navCallback, + destApi, + destSystem, + destPath, + dest.destProjectId, + mutateCallback, + hasAssociations, + ] + ); + + return ( + <> + {React.createElement(children, { onClick: showModal })} + Move Files} + footer={null} + > + {hasAssociations && ( + + This file or folder cannot be moved until its tags or associated + entities have been removed using the Curation Directory tab. + + } + /> + )} +
+
+ record.path} + scroll={{ y: '100%' }} + /> + +
+
+ { + return ( + + ); + }} + /> +
+ + listing.filter( + (f) => + f.type === 'dir' && + !selectedFiles.map((sf) => sf.path).includes(f.path) + ) + } + emptyListingDisplay="No folders to display." + scroll={undefined} + /> +
+
+
+ + + + ); +}; diff --git a/client/modules/datafiles/src/DatafilesModal/MoveModal/index.ts b/client/modules/datafiles/src/DatafilesModal/MoveModal/index.ts new file mode 100644 index 0000000000..4200e93362 --- /dev/null +++ b/client/modules/datafiles/src/DatafilesModal/MoveModal/index.ts @@ -0,0 +1 @@ +export * from './MoveModal'; diff --git a/client/modules/datafiles/src/DatafilesModal/NewFolderModal/NewFolderModal.tsx b/client/modules/datafiles/src/DatafilesModal/NewFolderModal/NewFolderModal.tsx index 2ab487fb7b..d98a98effc 100644 --- a/client/modules/datafiles/src/DatafilesModal/NewFolderModal/NewFolderModal.tsx +++ b/client/modules/datafiles/src/DatafilesModal/NewFolderModal/NewFolderModal.tsx @@ -16,21 +16,19 @@ export const NewFolderModalBody: React.FC<{ const handleNewFolderFinish = async (values: { newFolder: string }) => { const newFolder = values.newFolder; - try { - await mutate({ + mutate( + { src: { api, system, path, dirName: newFolder, }, - }); + }, + { onSuccess: () => handleCancel() } + ); - handleCancel(); // Close the modal after creating new folder - } catch (error) { - console.error('Error during form submission:', error); - // Handle error if needed - } + // Close the modal after creating new folder }; const validateNewFolder = (_: unknown, value: string) => { diff --git a/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewContent.tsx b/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewContent.tsx index 41a61ae3aa..b3989d9a8e 100644 --- a/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewContent.tsx +++ b/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewContent.tsx @@ -1,5 +1,6 @@ import { useConsumePostit, TPreviewFileType } from '@client/hooks'; -import { Spin } from 'antd'; +import { Alert, Spin } from 'antd'; +import { HAZMAPPER_BASE_URL_MAP } from '../../projects/utils'; import React, { useState } from 'react'; import styles from './PreviewModal.module.css'; @@ -10,18 +11,32 @@ 
export const PreviewSpinner: React.FC = () => ( export type TPreviewContent = React.FC<{ href: string; fileType: TPreviewFileType; + handleCancel: () => void; }>; -export const PreviewContent: TPreviewContent = ({ href, fileType }) => { +export const PreviewContent: TPreviewContent = ({ + href, + fileType, + handleCancel, +}) => { const [iframeLoading, setIframeLoading] = useState(true); const { data: PostitData, isLoading: isConsumingPostit } = useConsumePostit({ href, responseType: fileType === 'video' ? 'blob' : 'text', queryOptions: { - enabled: (!!href && fileType === 'text') || fileType === 'video', + enabled: + (!!href && fileType === 'text') || + fileType === 'video' || + fileType === 'hazmapper', }, }); + if (isConsumingPostit && fileType === 'hazmapper') + return ( + <> +

Opening in Hazmapper ...

+ + ); if (isConsumingPostit) return ; switch (fileType) { @@ -68,7 +83,33 @@ export const PreviewContent: TPreviewContent = ({ href, fileType }) => { > ); + case 'hazmapper': + { + if (!PostitData) return; + const body = JSON.parse(PostitData as string); + let baseUrl = + HAZMAPPER_BASE_URL_MAP[ + body.deployment as keyof typeof HAZMAPPER_BASE_URL_MAP + ]; + if (!baseUrl) { + console.error( + `Invalid deployment type: ${body.deployment}. Falling back to local` + ); + baseUrl = HAZMAPPER_BASE_URL_MAP['local']; + } + window.open(`${baseUrl}/project/${body.uuid}`, '_blank'); + handleCancel(); + } + break; default: - return Error.; + return ( + + ); } }; diff --git a/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewMetadata.tsx b/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewMetadata.tsx new file mode 100644 index 0000000000..672d117163 --- /dev/null +++ b/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewMetadata.tsx @@ -0,0 +1,62 @@ +import { Collapse, Table, TableProps } from 'antd'; +import React from 'react'; +import styles from './PreviewModal.module.css'; +import { TFileListing, useFileDetail } from '@client/hooks'; +import { toBytes } from '../../FileListing/FileListing'; + +const tableColumns: TableProps['columns'] = [ + { dataIndex: 'key', render: (value) => {value}, width: 200 }, + { dataIndex: 'value' }, +]; + +export const PreviewMetadata: React.FC<{ + selectedFile: TFileListing; + fileMeta: Record; +}> = ({ selectedFile, fileMeta }) => { + const { data: fileListingMeta } = useFileDetail( + 'tapis', + selectedFile.system, + 'private', + selectedFile.path + ); + + const baseListingMeta = [ + { key: 'File Name', value: fileListingMeta?.name }, + { key: 'File Path', value: fileListingMeta?.path }, + { key: 'File Size', value: toBytes(fileListingMeta?.length) }, + { + key: 'Last Modified', + value: + fileListingMeta?.lastModified && + new Date(fileListingMeta.lastModified).toLocaleString(), + }, + ]; + + const 
fullListingMeta = [ + ...baseListingMeta, + ...Object.keys(fileMeta).map((k) => ({ key: k, value: fileMeta[k] })), + ]; + + return ( + + File Metadata + + ), + children: ( +
+ ), + }, + ]} + /> + ); +}; diff --git a/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewModal.module.css b/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewModal.module.css index 1e5a1b0a3a..9449f9c4a0 100644 --- a/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewModal.module.css +++ b/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewModal.module.css @@ -27,3 +27,18 @@ .previewContainer img { width: 100%; } + +.metadataCollapse { + background-color: 'black'; +} + +.metadataCollapse :global(.ant-collapse-content-box) { + padding: 0px !important; +} +.metadataCollapse :global(.ant-table-thead) { + display: none; +} + +.metadataCollapse :global(.ant-table-tbody) tr:nth-child(odd) td { + background-color: white; +} diff --git a/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewModal.tsx b/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewModal.tsx index 90349ddc82..767bec7860 100644 --- a/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewModal.tsx +++ b/client/modules/datafiles/src/DatafilesModal/PreviewModal/PreviewModal.tsx @@ -1,19 +1,27 @@ import { useQueryClient } from '@tanstack/react-query'; -import { useFilePreview } from '@client/hooks'; +import { + TFileListing, + useFileListingRouteParams, + useFilePreview, +} from '@client/hooks'; import { Button, Modal } from 'antd'; import React, { useCallback, useState } from 'react'; import styles from './PreviewModal.module.css'; import { TModalChildren } from '../DatafilesModal'; import { PreviewSpinner, PreviewContent } from './PreviewContent'; +import { PreviewMetadata } from './PreviewMetadata'; +import { CopyModal } from '../CopyModal'; +import { DownloadModal } from '../DownloadModal'; +import { MoveModal } from '../MoveModal'; export const PreviewModalBody: React.FC<{ isOpen: boolean; api: string; - system: string; scheme?: string; - path: string; + + selectedFile: TFileListing; handleCancel: () => void; -}> 
= ({ isOpen, api, system, scheme, path, handleCancel }) => { +}> = ({ isOpen, api, scheme, selectedFile, handleCancel }) => { /* Typically modals are rendered in the same component as the button that manages the open/closed state. The modal body is exported separately for file previews, since @@ -22,12 +30,14 @@ export const PreviewModalBody: React.FC<{ const queryClient = useQueryClient(); const { data, isLoading } = useFilePreview({ api, - system, + system: selectedFile.system, scheme, - path, + path: selectedFile.path, + doi: selectedFile.doi, queryOptions: { enabled: isOpen }, }); + const { path: listingPath } = useFileListingRouteParams(); const handleClose = useCallback(() => { // Flush queries on close to prevent stale postits being read from cache. queryClient.removeQueries({ queryKey: ['datafiles', 'preview'] }); @@ -38,7 +48,7 @@ export const PreviewModalBody: React.FC<{ return ( File Preview: {path}} + title={

File Preview: {selectedFile.path.split('/').slice(-1)}

} width="60%" open={isOpen} footer={() => ( @@ -48,12 +58,72 @@ export const PreviewModalBody: React.FC<{ )} onCancel={handleClose} > + +
+ {!selectedFile.path.endsWith('.hazmapper') && ( + <> + {scheme === 'private' && api === 'tapis' && ( + + {({ onClick }) => ( + + )} + + )} + + + {({ onClick }) => ( + + )} + + + {({ onClick }) => ( + + )} + + + )} +
{isLoading && } {data && isOpen && ( )}
@@ -63,16 +133,14 @@ export const PreviewModalBody: React.FC<{ type TPreviewModal = React.FC<{ api: string; - system: string; scheme?: string; - path: string; + selectedFile: TFileListing; children: TModalChildren; }>; export const PreviewModal: TPreviewModal = ({ api, - system, scheme, - path, + selectedFile, children, }) => { const [isModalOpen, setIsModalOpen] = useState(false); @@ -91,9 +159,8 @@ export const PreviewModal: TPreviewModal = ({ {isModalOpen && ( diff --git a/client/modules/datafiles/src/DatafilesModal/RenameModal/RenameModal.tsx b/client/modules/datafiles/src/DatafilesModal/RenameModal/RenameModal.tsx index 5adeb94938..34d8a10451 100644 --- a/client/modules/datafiles/src/DatafilesModal/RenameModal/RenameModal.tsx +++ b/client/modules/datafiles/src/DatafilesModal/RenameModal/RenameModal.tsx @@ -1,7 +1,12 @@ -import { Button, Modal, Form, Input } from 'antd'; -import { useSelectedFiles, useRename } from '@client/hooks'; +import { Button, Modal, Form, Input, Alert } from 'antd'; +import { + useSelectedFiles, + useRename, + useCheckFilesForAssociation, +} from '@client/hooks'; import React, { useState } from 'react'; import { TModalChildren } from '../DatafilesModal'; +import { useParams } from 'react-router-dom'; export const RenameModalBody: React.FC<{ isOpen: boolean; @@ -17,17 +22,34 @@ export const RenameModalBody: React.FC<{ const { mutate } = useRename(); + let { projectId } = useParams(); + if (!projectId) projectId = ''; + + const hasAssociations = useCheckFilesForAssociation( + projectId, + selectedFiles.map((f) => f.path) + ); + const handleRenameFinish = async (values: { newName: string }) => { + const originalName = selectedFiles[0].name; const newName = values.newName; + const extension = originalName.includes('.') + ? originalName.substring(originalName.lastIndexOf('.')) + : ''; + + const fullName = newName.endsWith(extension) + ? 
newName + : newName + extension; + try { await mutate({ src: { api, system, path, - name: selectedFiles[0].name, - newName: newName, + name: originalName, + newName: fullName, }, }); @@ -61,34 +83,56 @@ export const RenameModalBody: React.FC<{ return ( Rename {selectedFilesName[0].name}} + title={

Rename {selectedFilesName[0]?.name}

} width="60%" open={isOpen} + destroyOnClose footer={null} // Remove the footer from here onCancel={handleCancel} > -
- + This file or folder cannot be renamed until its tags or associated + entities have been removed using the Curation Directory tab. + + } + /> + )} + {isOpen && ( + - - - -
- -
- + + + + +
+ +
+ + )}
); }; diff --git a/client/modules/datafiles/src/DatafilesModal/UploadFileModal/UploadFileModal.tsx b/client/modules/datafiles/src/DatafilesModal/UploadFileModal/UploadFileModal.tsx index 054b40d22f..f7d56b3cb3 100644 --- a/client/modules/datafiles/src/DatafilesModal/UploadFileModal/UploadFileModal.tsx +++ b/client/modules/datafiles/src/DatafilesModal/UploadFileModal/UploadFileModal.tsx @@ -16,7 +16,7 @@ export const UploadFileModalBody: React.FC<{ path: string; handleCancel: () => void; }> = ({ isOpen, api, system, scheme, path, handleCancel }) => { - const { mutate } = useUploadFile(); + const { mutateAsync } = useUploadFile(); const [fileList, setFileList] = useState([]); const [uploading, setUploading] = useState(false); @@ -36,7 +36,7 @@ export const UploadFileModalBody: React.FC<{ formData.append('file_name', fileList[i].name); formData.append('webkit_relative_path', ''); - await mutate({ + await mutateAsync({ api, system, scheme: 'private', // Optional @@ -47,6 +47,7 @@ export const UploadFileModalBody: React.FC<{ // All files uploaded successfully, close the modal setUploading(false); + setFileList([]); handleCancel(); } catch (error) { console.error('Error during form submission:', error); @@ -82,7 +83,10 @@ export const UploadFileModalBody: React.FC<{ width="60%" open={isOpen} footer={null} // Remove the footer from here - onCancel={handleCancel} + onCancel={() => { + handleCancel(); + handleReset(); + }} >
diff --git a/client/modules/datafiles/src/DatafilesSideNav/DatafilesSideNav.tsx b/client/modules/datafiles/src/DatafilesSideNav/DatafilesSideNav.tsx index 7ddba05d1e..f9412bed83 100644 --- a/client/modules/datafiles/src/DatafilesSideNav/DatafilesSideNav.tsx +++ b/client/modules/datafiles/src/DatafilesSideNav/DatafilesSideNav.tsx @@ -2,16 +2,18 @@ import React from 'react'; import { NavLink } from 'react-router-dom'; import styles from './DatafilesSideNav.module.css'; import { useAuthenticatedUser } from '@client/hooks'; +import { Tooltip } from 'antd'; -const DataFilesNavLink: React.FC> = ({ - to, - children, -}) => { +const DataFilesNavLink: React.FC< + React.PropsWithChildren<{ to: string; tooltip?: string }> +> = ({ to, tooltip, children }) => { return (
  • - -
    {children}
    -
    + + +
    {children}
    +
    +
  • ); }; @@ -28,37 +30,70 @@ export const DatafilesSideNav: React.FC = () => { > {user && ( <> - + My Data - + + HPC Work - My Projects - - Shared with Me + + My Projects
    - Box.com - Dropbox.com - Google Drive + + Box.com + + + Dropbox.com + + + Google Drive +
    )} - + Published
    - + Published (NEES) - + Community Data diff --git a/client/modules/datafiles/src/DatafilesToolbar/DatafilesToolbar.tsx b/client/modules/datafiles/src/DatafilesToolbar/DatafilesToolbar.tsx index 31b6341d67..8b08bbc73f 100644 --- a/client/modules/datafiles/src/DatafilesToolbar/DatafilesToolbar.tsx +++ b/client/modules/datafiles/src/DatafilesToolbar/DatafilesToolbar.tsx @@ -3,16 +3,20 @@ import styles from './DatafilesToolbar.module.css'; import { useAuthenticatedUser, useFileListingRouteParams, + useProjectDetail, useSelectedFiles, + useSelectedFilesForSystem, } from '@client/hooks'; import DatafilesModal from '../DatafilesModal/DatafilesModal'; import TrashButton from './TrashButton'; import { Button, ButtonProps, ConfigProvider, ThemeConfig } from 'antd'; +import { useMatches, useParams } from 'react-router-dom'; const toolbarTheme: ThemeConfig = { components: { Button: { colorPrimaryHover: 'rgba(0, 0, 0, 0.88)', + motionDurationMid: '0', }, }, }; @@ -27,22 +31,83 @@ const ToolbarButton: React.FC = (props) => { export const DatafilesToolbar: React.FC<{ searchInput?: React.ReactNode }> = ({ searchInput, }) => { - const { api, system, scheme, path } = useFileListingRouteParams(); - const { selectedFiles } = useSelectedFiles(api, system, path); + const routeParams = useFileListingRouteParams(); + const { scheme } = routeParams; + let { api, system, path } = routeParams; + const { neesid } = useParams(); + let { projectId } = useParams(); + const { user } = useAuthenticatedUser(); + const matches = useMatches(); + const isProjects = matches.find((m) => m.id === 'project'); + const isPublished = matches.find((m) => m.id === 'published'); + const isEntityListing = matches.find((m) => m.id === 'entity-listing'); + const isNees = matches.find((m) => m.id === 'nees'); + + const isReadOnly = + isPublished || isNees || system === 'designsafe.storage.community'; + + if (!isProjects) projectId = ''; + const { data } = useProjectDetail(projectId ?? 
''); + if (projectId) { + system = `project-${data?.baseProject.uuid}`; + api = 'tapis'; + } + if (isPublished) { + system = 'designsafe.storage.published'; + api = 'tapis'; + } + if (isNees) { + system = 'nees.public'; + api = 'tapis'; + } + if (isNees && !path) { + path = `/${neesid}`; + } + + /* + Project landing pages have multiple selectable listings, so use the + useSelectedFilesForSystem hook to capture every selection on the page. + */ + const { selectedFiles: listingSelectedFiles } = useSelectedFiles( + api, + system, + path + ); + const publicationSelectedFiles = useSelectedFilesForSystem('tapis', system); + const selectedFiles = isEntityListing + ? publicationSelectedFiles + : listingSelectedFiles; + const rules = useMemo( function () { // Rules for which toolbar buttons are active for a given selection. return { canPreview: selectedFiles.length === 1 && selectedFiles[0].type === 'file', - canRename: user && selectedFiles.length === 1, - canCopy: user && selectedFiles.length >= 1, - canTrash: user && selectedFiles.length >= 1, + canRename: + user && + selectedFiles.length === 1 && + !isReadOnly && + !selectedFiles[0].path.endsWith('.hazmapper'), + canCopy: + user && + selectedFiles.length >= 1 && + !selectedFiles[0].path.endsWith('.hazmapper'), + canTrash: + user && + selectedFiles.length >= 1 && + !isReadOnly && + !selectedFiles[0].path.endsWith('.hazmapper'), + // Disable downloads from frontera.work until we have a non-flaky mount on ds-download. + canDownload: + selectedFiles.length >= 1 && + system !== 'designsafe.storage.frontera.work' && + !selectedFiles[0].path.endsWith('.hazmapper'), }; }, - [selectedFiles, user] + [selectedFiles, isReadOnly, user, system] ); return ( @@ -62,11 +127,28 @@ export const DatafilesToolbar: React.FC<{ searchInput?: React.ReactNode }> = ({ )} - + {({ onClick }) => ( + + + Move + + )} + + {({ onClick }) => ( = ({ )} - + {({ onClick }) => ( = ({ Trash + + {({ onClick }) => ( + + + Download + + )} +
    ); diff --git a/client/modules/datafiles/src/DatafilesToolbar/TrashButton.tsx b/client/modules/datafiles/src/DatafilesToolbar/TrashButton.tsx index 1ddd578cf0..acdd4c287c 100644 --- a/client/modules/datafiles/src/DatafilesToolbar/TrashButton.tsx +++ b/client/modules/datafiles/src/DatafilesToolbar/TrashButton.tsx @@ -29,7 +29,7 @@ const TrashButton: React.FC> = React.memo( // const trashPath = path === 'myData' ? '${user.username}/.Trash' : '.Trash'; const userUsername: string | undefined = user?.username; let trashPath: string; - if (typeof userUsername === 'string') { + if (typeof userUsername === 'string' && !system.startsWith('project-')) { trashPath = userUsername + '/.Trash'; updateFilesPath(trashPath); } else { @@ -42,7 +42,12 @@ const TrashButton: React.FC> = React.memo( return ( - ), + + + )} +
    + {(fileTags ?? []) + .filter((tag) => tag.path === record.path) + .map((tag) => ( + + {tag.tagName} + + ))} + + ), }, { title: 'Size', @@ -94,7 +116,7 @@ export const FileListing: React.FC< render: (d) => new Date(d).toLocaleString(), }, ], - [setPreviewModalState, baseRoute] + [setPreviewModalState, baseRoute, fileTags, doi] ); return ( @@ -105,14 +127,15 @@ export const FileListing: React.FC< scheme={scheme} path={path} columns={columns} + emptyListingDisplay={emptyListingDisplay} {...tableProps} /> - {previewModalState.path && ( + {previewModalState.path && previewModalState.selectedFile && ( setPreviewModalState({ isOpen: false })} /> )} diff --git a/client/modules/datafiles/src/index.ts b/client/modules/datafiles/src/index.ts index 7b7de2123f..ff75d61eb3 100644 --- a/client/modules/datafiles/src/index.ts +++ b/client/modules/datafiles/src/index.ts @@ -1,11 +1,11 @@ export * from './lib/datafiles'; export * from './DatafilesSideNav/DatafilesSideNav'; +export * from './DatafilesHelpDropdown/DatafilesHelpDropdown'; export * from './AddFileFolder/AddFileFolder'; export * from './FileListing/FileListing'; export { default as DatafilesModal } from './DatafilesModal/DatafilesModal'; export * from './DatafilesToolbar/DatafilesToolbar'; -export * from './DatafilesBreadcrumb/DatafilesBreadcrumb'; export * from './nees'; export * from './projects'; diff --git a/client/modules/datafiles/src/nees/NeesDetails.module.css b/client/modules/datafiles/src/nees/NeesDetails.module.css new file mode 100644 index 0000000000..eefa204c32 --- /dev/null +++ b/client/modules/datafiles/src/nees/NeesDetails.module.css @@ -0,0 +1,34 @@ +.line-clamped { + display: -webkit-box; + -webkit-box-orient: vertical; + -webkit-line-clamp: 4; + overflow: hidden; +} + +.line-unclamped { + display: -webkit-box; + -webkit-box-orient: vertical; + overflow: hidden; +} + +th { + vertical-align: top; +} + +.nees-th { + width: 25%; +} + +.nees-td { + width: 75%; +} + +.nees-mini-table { + width: 100%; + 
margin-bottom: 0; +} + +.nees-divider { + margin-top: 10px; + margin-bottom: 10px; +} diff --git a/client/modules/datafiles/src/nees/NeesDetails.tsx b/client/modules/datafiles/src/nees/NeesDetails.tsx new file mode 100644 index 0000000000..2ceef5f2ba --- /dev/null +++ b/client/modules/datafiles/src/nees/NeesDetails.tsx @@ -0,0 +1,445 @@ +import { useNeesDetails } from '@client/hooks'; +import { Tabs, Button, Divider, Modal, Flex } from 'antd'; +import React, { useEffect, useState, useCallback } from 'react'; +import { Link, useParams } from 'react-router-dom'; + +import styles from './NeesDetails.module.css'; +import { DatafilesBreadcrumb } from '@client/common-components'; +import { FileListing } from '../FileListing/FileListing'; + +export const DescriptionExpander: React.FC = ({ + children, +}) => { + const [expanderRef, setExpanderRef] = useState(null); + const [expanded, setExpanded] = useState(false); + const [expandable, setExpandable] = useState(false); + + const expanderRefCallback = useCallback( + (node: HTMLElement) => { + if (node !== null) setExpanderRef(node); + }, + [setExpanderRef] + ); + + useEffect(() => { + const ro = new ResizeObserver((entries) => { + for (const entry of entries) { + setExpandable(entry.target.scrollHeight > entry.target.clientHeight); + } + }); + expanderRef && ro.observe(expanderRef); + return () => { + ro.disconnect(); + }; + }, [setExpandable, expanderRef]); + + return ( +
    + + {children} + + {(expandable || expanded) && ( + + )} +
    + ); +}; + +export const NeesDetails: React.FC<{ neesId: string }> = ({ neesId }) => { + const { data } = useNeesDetails(neesId); + const neesProjectData = data?.metadata.project; + const neesExperiments = data?.metadata.experiments; + const numDOIs = neesExperiments?.filter((exp) => !!exp.doi).length || 0; + const routeParams = useParams(); + const path = routeParams.path ?? data?.path; + + const [activeTab, setActiveTab] = useState('files'); + useEffect(() => setActiveTab('files'), [path]); + + const neesCitations = neesExperiments + ?.filter((exp) => !!exp.doi) + .map((u) => { + const authors = u.creators + ?.map((a) => a.lastName + ', ' + a.firstName) + .join('; '); + const doi = u.doi; + const doiUrl = 'https://doi.org/' + doi; + const year = u.endDate + ? u.endDate.split('T')[0].split('-')[0] + : u.startDate.split('T')[0].split('-')[0]; + + return ( +
    + {authors}, ({year}), "{u.title}", DesignSafe-CI [publisher], doi:{' '} + {doi} +
    + {doiUrl} + +
    + ); + }); + + const doiList = () => { + Modal.info({ + title: 'DOIs', + content: neesCitations, + width: 600, + }); + }; + + const experimentsList = neesExperiments?.map((exp) => { + return ( +
    + +
    {exp.name}
    +
    +
    + + + + + + + + + + + + + + {exp.doi ? ( + + + + + ) : ( + + )} + {exp.doi ? ( + + + + + ) : ( + + )} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Title{exp.title}
    Creators + {exp.creators + ? exp.creators?.map((c) => ( +
    + {c.firstName} {c.lastName} +
    + )) + : 'No Creators Listed'} +
    DOI{exp.doi}
    Citation + {exp.creators + ?.map( + (author) => author.lastName + ', ' + author.firstName + ) + .join('; ')} + , ( + {exp.endDate + ? exp.endDate.split('T')[0].split('-')[0] + : exp.startDate.split('T')[0].split('-')[0]} + ), "{exp.title}", DesignSafe-CI [publisher], doi:{' '} + {exp.doi} +
    Type{exp.type}
    Description + {exp.description ? ( + + {exp.description} + + ) : ( + 'No Description' + )} +
    Start Date{exp.startDate}
    End Date{exp.endDate ? exp.endDate : 'No End Date'}
    Equipment + + + {exp.equipment ? ( + + + + + + + ) : ( + + + + )} + + + {exp.equipment?.map((eq) => ( + + + + + + + ))} + +
    EquipmentComponentEquipment ClassFacility
    No Equipment Listed
    {eq.equipment}{eq.component}{eq.equipmentClass}{eq.facility}
    +
    Material + {exp.material + ? exp.material?.map((mat) => ( +
    +
    {mat.component}:
    +
    + {mat.materials?.map((mats) => ( +
    {mats}
    + ))} +
    +
    +
    + )) + : 'No Materials Listed '} +
    Files + setActiveTab('files')} + > + {' '} + {exp.name} + +
    + + + + + ); + }); + + const neesFiles = ( + <> + { + return ( + + {obj.title} + + ); + }} + /> + + + ); + + return ( + <> +
    +

    + {neesProjectData?.name}: {neesProjectData?.title} +

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + + + + + +
    PIs + {neesProjectData?.pis + ? neesProjectData?.pis.map((u) => ( +
    + {u.firstName} {u.lastName} +
    + )) + : 'No PIs Listed'} +
    +
    + + + + + +
    Organizations + {neesProjectData?.organization + ? neesProjectData?.organization.map((u) => ( +
    + {u.name} {u.state}, {u.country} +
    + )) + : 'No Organizations Listed'} +
    +
    + + + +
    + + + + + +
    NEES ID{neesProjectData?.name}
    +
    + + + + + +
    Sponsors + {neesProjectData?.sponsor + ? neesProjectData?.sponsor?.map((u) => ( +
    + + {u.name} + +
    + )) + : 'No Sponsors Listed'} +
    +
    + + + +
    + + + + + + + +
    Project TypeNEES
    +
    + + + + + + + +
    Start Date + {neesProjectData?.startDate + ? neesProjectData?.startDate + : 'No Start Date'} +
    +
    + + + +
    + + {numDOIs > 0 ? ( + + + + + ) : ( + + )} + +
    DOIs + +
    + + + Description: + + {neesProjectData?.description} + +
    +
    + setActiveTab(activeKey)} + type="card" + items={[ + { + key: 'files', + label: 'Files', + children: neesFiles, + }, + { + key: 'experiments', + label: 'Experiments', + children: experimentsList, + }, + ]} + /> +
    + + ); +}; diff --git a/client/modules/datafiles/src/nees/NeesListing.tsx b/client/modules/datafiles/src/nees/NeesListing.tsx index 6e8002d946..200a88842e 100644 --- a/client/modules/datafiles/src/nees/NeesListing.tsx +++ b/client/modules/datafiles/src/nees/NeesListing.tsx @@ -57,7 +57,7 @@ export const NeesListing: React.FC = () => { scroll={{ y: '100%' }} rowKey={(row) => row.path} pagination={{ - total: data?.listing.length, + total: data?.total, showSizeChanger: false, current: currentPage, pageSize: 100, diff --git a/client/modules/datafiles/src/nees/index.ts b/client/modules/datafiles/src/nees/index.ts index ec5ae9b297..544ad7dd59 100644 --- a/client/modules/datafiles/src/nees/index.ts +++ b/client/modules/datafiles/src/nees/index.ts @@ -1 +1,2 @@ export { NeesListing } from './NeesListing'; +export { NeesDetails } from './NeesDetails'; diff --git a/client/modules/datafiles/src/projects/BaseProjectDetails.module.css b/client/modules/datafiles/src/projects/BaseProjectDetails.module.css index 8a66681587..ae1129fd79 100644 --- a/client/modules/datafiles/src/projects/BaseProjectDetails.module.css +++ b/client/modules/datafiles/src/projects/BaseProjectDetails.module.css @@ -12,5 +12,5 @@ } .prj-row td { - padding: 2px 0px; + padding: 3px 0px; } diff --git a/client/modules/datafiles/src/projects/BaseProjectDetails.tsx b/client/modules/datafiles/src/projects/BaseProjectDetails.tsx index dc61320f18..a9fa249fd9 100644 --- a/client/modules/datafiles/src/projects/BaseProjectDetails.tsx +++ b/client/modules/datafiles/src/projects/BaseProjectDetails.tsx @@ -4,6 +4,11 @@ import { TBaseProjectValue, TProjectUser } from '@client/hooks'; import styles from './BaseProjectDetails.module.css'; import { Button, Col, Popover, Row, Select, Tooltip } from 'antd'; import { useSearchParams } from 'react-router-dom'; +import { RelateDataModal } from './modals'; +import { ProjectInfoModal } from './modals/ProjectInfoModal'; +import { VersionChangesModal } from 
'./modals/VersionChangesModal'; +import { SubmitFeedbackModal } from '../publications/modals/SubmitFeedbackModal'; +import { filterHazmapperMaps, getHazmapperUrl } from './utils'; export const DescriptionExpander: React.FC = ({ children, @@ -65,14 +70,15 @@ export const LicenseDisplay: React.FC<{ licenseType: string }> = ({
      - {licenseType} + {licenseType}
    ); }; export const UsernamePopover: React.FC<{ user: TProjectUser }> = ({ user }) => { const content = ( -
    = ({ user }) => { gap: '10px', }} > - - Name - + + + Name + + {user.fname} {user.lname} - - Email - + + + Email + + {user.email} - - Institution - + + + Institution + + {user.inst} -
    +
    ); return ( = ({ projectValue, publicationDate, versions }) => { + isPublished?: boolean; +}> = ({ projectValue, publicationDate, versions, isPublished }) => { const pi = projectValue.users.find((u) => u.role === 'pi'); const coPis = projectValue.users.filter((u) => u.role === 'co_pi'); const projectType = [ @@ -150,6 +163,14 @@ export const BaseProjectDetails: React.FC<{ }); }; + const currentVersion = versions + ? parseInt(searchParams.get('version') ?? Math.max(...versions).toString()) + : 1; + + const filteredHazmapperMaps = filterHazmapperMaps( + projectValue.hazmapperMaps ?? [] + ); + return (
    - + )} {(projectValue.dataTypes?.length ?? 0) > 0 && ( @@ -283,6 +312,7 @@ export const BaseProjectDetails: React.FC<{ )} + {(projectValue.referencedData?.length ?? 0) > 0 && ( + + + + + )} - {(projectValue.hazmapperMaps?.length ?? 0) > 0 && ( + {(filteredHazmapperMaps?.length ?? 0) > 0 && ( )}
    Project Type{projectType} + {projectType} + {!isPublished && ( + <> + {' '} + + + )} +
    {projectValue.associatedProjects.map((assoc) => (
    + {assoc.type} |{' '}
    Referenced Data and Software + {projectValue.referencedData.map((ref) => ( +
    + {ref.hrefType && `${ref.hrefType} | `} + + {ref.title} + +
    + ))} +
    Keywords {projectValue.keywords.join(', ')}
    Hazmapper Maps - {(projectValue.hazmapperMaps ?? []).map((m) => ( + {(filteredHazmapperMaps ?? []).map((m) => (
    - - Description: - {projectValue.description} - + + {isPublished && ( +
    + {!['other', 'field_reconnaissance'].includes( + projectValue.projectType + ) && ( + <> + + {({ onClick }) => ( + + )} + {' '} + |{' '} + + )} + +
    + )} + {projectValue.description && ( + + Description: + {projectValue.description} + + )}
    ); }; diff --git a/client/modules/datafiles/src/projects/EmptyProjectFileListing.tsx b/client/modules/datafiles/src/projects/EmptyProjectFileListing.tsx new file mode 100644 index 0000000000..09bf0892d4 --- /dev/null +++ b/client/modules/datafiles/src/projects/EmptyProjectFileListing.tsx @@ -0,0 +1,24 @@ +import React from 'react'; + +export const EmptyProjectFileListing: React.FC = () => { + return ( +

    + This folder is empty!
    + +   + +
    + + +   + + Learn how to move files to a project + +

    + ); +}; diff --git a/client/modules/datafiles/src/projects/ProjectCitation/ProjectCitation.module.css b/client/modules/datafiles/src/projects/ProjectCitation/ProjectCitation.module.css new file mode 100644 index 0000000000..f47cb1f1b2 --- /dev/null +++ b/client/modules/datafiles/src/projects/ProjectCitation/ProjectCitation.module.css @@ -0,0 +1,3 @@ +.yellow-highlight { + background-color: #ece4bf; +} diff --git a/client/modules/datafiles/src/projects/ProjectCitation/ProjectCitation.tsx b/client/modules/datafiles/src/projects/ProjectCitation/ProjectCitation.tsx index f4fd8dc9d4..28c9fe72e7 100644 --- a/client/modules/datafiles/src/projects/ProjectCitation/ProjectCitation.tsx +++ b/client/modules/datafiles/src/projects/ProjectCitation/ProjectCitation.tsx @@ -1,4 +1,11 @@ -import { useProjectDetail, usePublicationDetail } from '@client/hooks'; +import React, { useState } from 'react'; +import { + useDataciteMetrics, + useProjectDetail, + usePublicationDetail, +} from '@client/hooks'; +import { MetricsModal } from '../modals/MetricsModal'; +import styles from './ProjectCitation.module.css'; export const ProjectCitation: React.FC<{ projectId: string; @@ -6,14 +13,17 @@ export const ProjectCitation: React.FC<{ }> = ({ projectId, entityUuid }) => { const { data } = useProjectDetail(projectId); const entityDetails = data?.entities.find((e) => e.uuid === entityUuid); - + const authors = + entityDetails?.value.authors?.filter((a) => a.fname && a.lname) ?? []; if (!data || !entityDetails) return null; return (
    - {(entityDetails.value.authors ?? []) + {authors .map((author, idx) => idx === 0 - ? `${author.lname}, ${author.fname[0]}.` + ? `${author.lname}, ${author.fname[0]}${ + authors.length > 1 ? '.' : '' + }` : `${author.fname[0]}. ${author.lname}` ) .join(', ')} @@ -33,20 +43,142 @@ export const PublishedCitation: React.FC<{ const entityDetails = (data?.tree.children ?? []).find( (child) => child.uuid === entityUuid && child.version === version ); + + const authors = entityDetails?.value.authors ?? []; if (!data || !entityDetails) return null; + const doi = + entityDetails.value.dois && entityDetails.value.dois.length > 0 + ? entityDetails.value.dois[0] + : ''; + return (
    - {(entityDetails.value.authors ?? []) + {authors .map((author, idx) => idx === 0 - ? `${author.lname}, ${author.fname[0]}.` + ? `${author.lname}, ${author.fname[0]}${ + authors.length > 1 ? '.' : '' + }` : `${author.fname[0]}. ${author.lname}` ) .join(', ')}{' '} ({new Date(entityDetails.publicationDate).getFullYear()}). " {entityDetails.value.title}", in {data.baseProject.title}. - DesignSafe-CI. ({entityDetails.value.dois && entityDetails.value.dois[0]}) + DesignSafe-CI.{' '} + {doi && ( + + https://doi.org/{doi} + + )} + {/* DesignSafe-CI. ({entityDetails.value.dois && entityDetails.value.dois[0]}) */} +
    + ); +}; + +export const DownloadCitation: React.FC<{ + projectId: string; + entityUuid: string; + preview?: boolean; +}> = ({ projectId, entityUuid, preview }) => { + const { + data, + isLoading: isProjectLoading, + isError: isProjectError, + error: projectError, + } = usePublicationDetail(projectId); + + const [isModalVisible, setIsModalVisible] = useState(false); + + const entityDetails = (data?.tree.children ?? []).find( + (child) => child.uuid === entityUuid + ); + + const doi = + entityDetails?.value.dois && entityDetails.value.dois.length > 0 + ? entityDetails.value.dois[0] + : ''; + + const { data: dataciteMetrics } = useDataciteMetrics(doi, !preview); + + const openModal = () => { + setIsModalVisible(true); + }; + + const closeModal = () => { + setIsModalVisible(false); + }; + + if (isProjectLoading) return
    Loading project details...
    ; + if (isProjectError) + return
    Error fetching project details: {projectError.message}
    ; + if (!entityDetails) return null; + + return ( +
    + {dataciteMetrics && !preview && ( +
    + Download Citation: + + DataCite XML + {' '} + | + + {' '} + RIS + {' '} + | + + {' '} + BibTeX + +
    + + {dataciteMetrics?.data.attributes.downloadCount ?? '--'} Downloads + +      + + {dataciteMetrics?.data.attributes.viewCount ?? '--'} Views + +      + + {dataciteMetrics?.data.attributes.citationCount ?? '--'} Citations + +      + + Details + + +
    +
    + )}
    ); }; diff --git a/client/modules/datafiles/src/projects/ProjectCurationFileListing/ProjectCurationFileListing.tsx b/client/modules/datafiles/src/projects/ProjectCurationFileListing/ProjectCurationFileListing.tsx index 61830f027c..de9233a55c 100644 --- a/client/modules/datafiles/src/projects/ProjectCurationFileListing/ProjectCurationFileListing.tsx +++ b/client/modules/datafiles/src/projects/ProjectCurationFileListing/ProjectCurationFileListing.tsx @@ -1,11 +1,12 @@ import React, { useCallback, useEffect, useMemo, useState } from 'react'; import { FileListingTable, + FileTypeIcon, TFileListingColumns, -} from '../../FileListing/FileListingTable/FileListingTable'; +} from '@client/common-components'; import { toBytes } from '../../FileListing/FileListing'; import { PreviewModalBody } from '../../DatafilesModal/PreviewModal'; -import { NavLink } from 'react-router-dom'; +import { NavLink, useParams } from 'react-router-dom'; import { TEntityMeta, TFileListing, @@ -15,6 +16,7 @@ import { useFileTags, useProjectDetail, useRemoveFileAssociation, + useSelectedFiles, useSetFileTags, } from '@client/hooks'; import { Button, Select } from 'antd'; @@ -25,6 +27,7 @@ import { } from '../constants'; import { DefaultOptionType } from 'antd/es/select'; import { FILE_TAG_OPTIONS } from './ProjectFileTagOptions'; +import { EmptyProjectFileListing } from '../EmptyProjectFileListing'; const FileTagInput: React.FC<{ projectId: string; @@ -98,6 +101,15 @@ const FileCurationSelector: React.FC<{ const [selectedEntity, setSelectedEntity] = useState( undefined ); + const { path } = useParams(); + const { selectedFiles, unsetSelections } = useSelectedFiles( + 'tapis', + fileObj.system, + path ?? 
'' + ); + const showEntitySelector = + selectedFiles.length === 0 || + (selectedFiles.length > 0 && fileObj.path === selectedFiles[0].path); const entitiesForFile = useMemo(() => { const associatedEntities = Object.keys(filePathsToEntities) @@ -137,6 +149,7 @@ const FileCurationSelector: React.FC<{
      {entitiesForFile.map((e) => (
    • + {}
      ))}
    • -
      - - value={selectedEntity} - allowClear - onChange={(newVal) => setSelectedEntity(newVal)} - options={options} - placeholder="Select Category" - style={{ flex: 1 }} - /> - {selectedEntity && ( - - )} -
      + {showEntitySelector && ( +
      + + virtual={false} + value={selectedEntity} + allowClear + onChange={(newVal) => setSelectedEntity(newVal)} + options={options} + placeholder={`Select Category ${ + selectedFiles.length > 0 + ? `for ${selectedFiles.length} selected file(s)` + : '' + }`} + style={{ flex: 1 }} + /> + {selectedEntity && ( + + )} +
      + )}
    @@ -231,20 +260,21 @@ export const ProjectCurationFileListing: React.FC<{ const tagMapping = useFileTags(projectId); const options: DefaultOptionType[] = useMemo( () => - ENTITIES_WITH_FILES[data?.baseProject.value.projectType ?? 'None'].map( - (t) => ({ + ENTITIES_WITH_FILES[data?.baseProject.value.projectType ?? 'None'] + .map((t) => ({ label: DISPLAY_NAMES[t], options: data?.entities .filter((e) => e.name === t) .map((e) => ({ label: e.value.title, value: e.uuid })), - }) - ), + })) + .filter((t) => (t.options?.length ?? 0) > 0), [data] ); const [previewModalState, setPreviewModalState] = useState<{ isOpen: boolean; path?: string; + selectedFile?: TFileListing; }>({ isOpen: false }); const columns: TFileListingColumns = useMemo( @@ -273,21 +303,22 @@ export const ProjectCurationFileListing: React.FC<{ {data} ) : ( - + {data} + + )}
    {' '} } scroll={{ y: 500 }} /> - {previewModalState.path && ( + {previewModalState.path && previewModalState.selectedFile && ( setPreviewModalState({ isOpen: false })} /> )} diff --git a/client/modules/datafiles/src/projects/ProjectMetrics/ProjectMetrics.module.css b/client/modules/datafiles/src/projects/ProjectMetrics/ProjectMetrics.module.css new file mode 100644 index 0000000000..e69de29bb2 diff --git a/client/modules/datafiles/src/projects/ProjectMetrics/ProjectMetrics.tsx b/client/modules/datafiles/src/projects/ProjectMetrics/ProjectMetrics.tsx new file mode 100644 index 0000000000..e6f4275f59 --- /dev/null +++ b/client/modules/datafiles/src/projects/ProjectMetrics/ProjectMetrics.tsx @@ -0,0 +1,29 @@ +import { usePublicationDetail } from '@client/hooks'; + +export const ProjectMetrics: React.FC<{ + projectId: string; + entityUuid: string; + version?: number; +}> = ({ projectId, entityUuid, version = 1 }) => { + const { data } = usePublicationDetail(projectId); + + const entityDetails = (data?.tree.children ?? []).find( + (child) => child.uuid === entityUuid && child.version === version + ); + if (!data || !entityDetails) return null; + + return ( +
    + {(entityDetails.value.authors ?? []) + .map((author, idx) => + idx === 0 + ? `${author.lname}, ${author.fname[0]}.` + : `${author.fname[0]}. ${author.lname}` + ) + .join(', ')}{' '} + ({new Date(entityDetails.publicationDate).getFullYear()}). " + {entityDetails.value.title}", in {data.baseProject.title}. + DesignSafe-CI. ({entityDetails.value.dois && entityDetails.value.dois[0]}) +
    + ); +}; diff --git a/client/modules/datafiles/src/projects/ProjectPipeline/PipelineOrderAuthors.tsx b/client/modules/datafiles/src/projects/ProjectPipeline/PipelineOrderAuthors.tsx index 42b311d392..36cbb7c7e2 100644 --- a/client/modules/datafiles/src/projects/ProjectPipeline/PipelineOrderAuthors.tsx +++ b/client/modules/datafiles/src/projects/ProjectPipeline/PipelineOrderAuthors.tsx @@ -56,7 +56,7 @@ export const PipelineOrderAuthors: React.FC<{ > - - + {searchParams.get('operation') === 'amend' ? ( + + File selections cannot be changed when amending a publication. + If you need to make a change to published files, please create a + new version instead. + + } + /> + ) : ( + <> +
    +

    Select Files

    {' '} + +
    + + + )} ); diff --git a/client/modules/datafiles/src/projects/ProjectPipeline/PipelineProofreadCategories.tsx b/client/modules/datafiles/src/projects/ProjectPipeline/PipelineProofreadCategories.tsx index dd75f0784d..01179555a2 100644 --- a/client/modules/datafiles/src/projects/ProjectPipeline/PipelineProofreadCategories.tsx +++ b/client/modules/datafiles/src/projects/ProjectPipeline/PipelineProofreadCategories.tsx @@ -3,12 +3,14 @@ import { TPreviewTreeData, useProjectPreview } from '@client/hooks'; import { Button } from 'antd'; import { useSearchParams } from 'react-router-dom'; import { PublishedEntityDisplay } from '../ProjectPreview/ProjectPreview'; +import { PipelineEditCategoryModal } from '../modals'; export const PipelineProofreadCategories: React.FC<{ projectId: string; + displayName?: string; nextStep: () => void; prevStep: () => void; -}> = ({ projectId, nextStep, prevStep }) => { +}> = ({ projectId, displayName, nextStep, prevStep }) => { const { data } = useProjectPreview(projectId ?? ''); const { children } = (data?.tree ?? { children: [] }) as TPreviewTreeData; const [searchParams] = useSearchParams(); @@ -35,7 +37,7 @@ export const PipelineProofreadCategories: React.FC<{ > + )} + + ))} diff --git a/client/modules/datafiles/src/projects/ProjectPipeline/PipelineProofreadProjectStep.tsx b/client/modules/datafiles/src/projects/ProjectPipeline/PipelineProofreadProjectStep.tsx index 26769efcde..be46b89243 100644 --- a/client/modules/datafiles/src/projects/ProjectPipeline/PipelineProofreadProjectStep.tsx +++ b/client/modules/datafiles/src/projects/ProjectPipeline/PipelineProofreadProjectStep.tsx @@ -44,7 +44,7 @@ export const PipelineProofreadProjectStep: React.FC<{ target="_blank" aria-describedby="msg-open-new-window" > - Curation office hours + curation office hours {' '} for help with publishing. 
diff --git a/client/modules/datafiles/src/projects/ProjectPipeline/PipelineProofreadPublications.tsx b/client/modules/datafiles/src/projects/ProjectPipeline/PipelineProofreadPublications.tsx index 3cbe281691..c870de6dde 100644 --- a/client/modules/datafiles/src/projects/ProjectPipeline/PipelineProofreadPublications.tsx +++ b/client/modules/datafiles/src/projects/ProjectPipeline/PipelineProofreadPublications.tsx @@ -4,12 +4,14 @@ import { Button } from 'antd'; import { useSearchParams } from 'react-router-dom'; import { TPreviewTreeData } from '@client/hooks'; import { PublishedEntityDisplay } from '../ProjectPreview/ProjectPreview'; +import { PipelineEditCategoryModal } from '../modals'; export const PipelineProofreadPublications: React.FC<{ projectId: string; + displayName?: string; nextStep: () => void; prevStep: () => void; -}> = ({ projectId, nextStep, prevStep }) => { +}> = ({ projectId, displayName, nextStep, prevStep }) => { const { data } = useProjectPreview(projectId ?? ''); const { children } = (data?.tree ?? { children: [] }) as TPreviewTreeData; const [searchParams] = useSearchParams(); @@ -47,10 +49,56 @@ export const PipelineProofreadPublications: React.FC<{ Continue +

    + Proofread your {displayName} Metadata +

    +
      +
    • If you selected the wrong collection, go back to Selection.
    • +
    • + If you need to add or modify files, click "Exit Prepare to Publish" + and make your changes in the Curation Directory. +
    • +
    • + If you need help, attend{' '} + + curation office hours + {' '} + for help with publishing. +
    • +
    -
    +
    {sortedChildren.map((child) => (
    +
    + + {({ onClick }) => ( + + )} + +
    = ({ projectId, entityUuids, projectType, disabled }) => { +}> = ({ projectId, entityUuids, operation, projectType, disabled }) => { const [isModalOpen, setIsModalOpen] = useState(false); const showModal = () => { @@ -17,12 +25,56 @@ export const PipelinePublishModal: React.FC<{ setIsModalOpen(false); }; + const [versionInfo, setVersionInfo] = useState(''); + + const { mutate: publishMutation } = usePublishProject(); + const { mutate: amendMutation } = useAmendProject(); + const { mutate: versionMutation } = useVersionProject(); + const navigate = useNavigate(); + const { notifyApi } = useNotifyContext(); + const successCallback = () => { + navigate(`/projects/${projectId}`); + notifyApi?.open({ + type: 'success', + message: '', + description: 'Your publication request has been submitted', + placement: 'bottomLeft', + }); + }; + + const doPublish = () => { + switch (operation) { + case 'publish': + publishMutation( + { projectId, entityUuids }, + { onSuccess: successCallback } + ); + break; + case 'amend': + amendMutation({ projectId }, { onSuccess: successCallback }); + break; + case 'version': + versionMutation( + { projectId, entityUuids, versionInfo }, + { onSuccess: successCallback } + ); + break; + } + }; + + const publishButtonText: Record = { + amend: 'Amend Publication', + version: 'Create a New Version', + publish: 'Request DOI & Publish', + }; + const [protectedDataAgreement, setProtectedDataAgreement] = useState(false); const [publishingAgreement, setPublishingAgreement] = useState(false); const canPublish = publishingAgreement && - (projectType === 'field_recon' ? protectedDataAgreement : true); + (projectType === 'field_recon' ? protectedDataAgreement : true) && + (operation === 'version' ? !!versionInfo : true); return ( <> @@ -33,37 +85,74 @@ export const PipelinePublishModal: React.FC<{ type="primary" onClick={showModal} > - Request DOI and Publish +   + {publishButtonText[operation]} ( -
    - - setPublishingAgreement(e.target.checked)} - /> - - - + + setPublishingAgreement(e.target.checked)} + /> + + + +
    )} onCancel={handleCancel} @@ -230,23 +319,15 @@ export const PipelinePublishModal: React.FC<{ > setProtectedDataAgreement(e.target.checked)} /> - -
    )} diff --git a/client/modules/datafiles/src/projects/ProjectPipeline/PipelineSelectForPublish.tsx b/client/modules/datafiles/src/projects/ProjectPipeline/PipelineSelectForPublish.tsx index b559670f98..fede1d21e0 100644 --- a/client/modules/datafiles/src/projects/ProjectPipeline/PipelineSelectForPublish.tsx +++ b/client/modules/datafiles/src/projects/ProjectPipeline/PipelineSelectForPublish.tsx @@ -1,4 +1,4 @@ -import React, { useMemo, useState } from 'react'; +import React, { useCallback, useEffect, useMemo, useState } from 'react'; import { TPipelineValidationResult, TPreviewTreeData, @@ -17,22 +17,44 @@ const PipelineValidationAlert: React.FC<{ + message={ + + {' '} Your selection has missing data or incomplete requirements. Please review the following fields: - {(validationErrors ?? []).map((validationError) => ( -
    - In the {DISPLAY_NAMES[validationError.name]}{' '} - {validationError.title}, the following - requirements are missing or incomplete: -
      - {validationError.missing.map((missingReq) => ( -
    • {DISPLAY_NAMES[missingReq]}
    • - ))} -
    -
    - ))}{' '} +
    + } + description={ +
    + {(validationErrors ?? []) + .filter((e) => e.errorType === 'MISSING_ENTITY') + .map((validationError) => ( +
    + In the {DISPLAY_NAMES[validationError.name]}{' '} + {validationError.title}, the following + requirements are missing or incomplete: +
      + {validationError.missing.map((missingReq) => ( +
    • {DISPLAY_NAMES[missingReq]}
    • + ))} +
    +
    + ))} + {(validationErrors ?? []) + .filter((e) => e.errorType === 'MISSING_FILES') + .map((validationError) => ( +
    + The {DISPLAY_NAMES[validationError.name]}{' '} + {validationError.title} has no associated data. +
    + ))} + {(validationErrors ?? []) + .filter((e) => e.errorType === 'NO_SELECTION') + .map((validationError) => ( +
    + No publishable collections are selected. +
    + ))}
    } /> @@ -55,24 +77,59 @@ export const PipelineSelectForPublish: React.FC<{ [children] ); const [searchParams, setSearchParams] = useSearchParams(); + const operation = searchParams.get('operation'); + const selectedEntities = searchParams.getAll('selected'); - const toggleEntitySelection = (uuid: string) => { - const selectedEntities = searchParams.getAll('selected'); - const newSearchParams = new URLSearchParams(searchParams); + const toggleEntitySelection = useCallback( + (uuid: string) => { + const selectedEntities = searchParams.getAll('selected'); + const newSearchParams = new URLSearchParams(searchParams); - if (selectedEntities.includes(uuid)) { - newSearchParams.delete('selected', uuid); - setSearchParams(newSearchParams, { replace: true }); - } else { - newSearchParams.append('selected', uuid); - setSearchParams(newSearchParams, { replace: true }); + if (selectedEntities.includes(uuid)) { + newSearchParams.delete('selected', uuid); + setSearchParams(newSearchParams, { replace: true }); + } else { + newSearchParams.append('selected', uuid); + setSearchParams(newSearchParams, { replace: true }); + } + }, + [setSearchParams, searchParams] + ); + + useEffect(() => { + if (operation !== 'publish') { + const publishableChildren = sortedChildren.filter((child) => + data?.entities.some( + (ent) => ent.uuid === child.uuid && (ent.value.dois?.length ?? 
0) > 0 + ) + ); + publishableChildren.forEach((c) => { + if (!selectedEntities.includes(c.uuid)) { + toggleEntitySelection(c.uuid); + } + }); } - }; + }, [ + operation, + sortedChildren, + data, + toggleEntitySelection, + selectedEntities, + ]); const validateAndContinue = async () => { const entityUuids = searchParams.getAll('selected'); const res = await mutateAsync({ projectId, entityUuids: entityUuids }); - if (res.result.length > 0) { + if (entityUuids.length === 0) { + setValidationErrors([ + { + name: 'Project', + title: 'Project', + errorType: 'NO_SELECTION', + missing: [], + }, + ]); + } else if (res.result.length > 0) { setValidationErrors(res.result); } else { setValidationErrors(undefined); @@ -106,6 +163,31 @@ export const PipelineSelectForPublish: React.FC<{ Continue + {operation !== 'publish' && ( + + Amending or revising a project will impact all previously + published works. New datasets cannot be published through this + process. +
    + If you need to publish subsequent dataset(s), please{' '} + + submit a ticket + {' '} + with your project number, the name of the dataset(s), and the + author order of the dataset(s). + + } + /> + )} {(validationErrors?.length ?? 0) > 0 && ( )} @@ -113,6 +195,7 @@ export const PipelineSelectForPublish: React.FC<{ {sortedChildren.map((child) => (
    - + diff --git a/client/modules/datafiles/src/projects/ProjectPipeline/ProjectPipeline.tsx b/client/modules/datafiles/src/projects/ProjectPipeline/ProjectPipeline.tsx index 3ea46d3a60..dc407ce42a 100644 --- a/client/modules/datafiles/src/projects/ProjectPipeline/ProjectPipeline.tsx +++ b/client/modules/datafiles/src/projects/ProjectPipeline/ProjectPipeline.tsx @@ -8,7 +8,7 @@ import { PipelineOrderAuthors } from './PipelineOrderAuthors'; import { PipelineProofreadPublications } from './PipelineProofreadPublications'; import { PipelineProofreadCategories } from './PipelineProofreadCategories'; import { PipelineSelectLicense } from './PipelineSelectLicense'; -import { useSearchParams } from 'react-router-dom'; +import { Link, useSearchParams } from 'react-router-dom'; const getSteps = ( projectId: string, @@ -19,10 +19,10 @@ const getSteps = ( const proofreadStepMapping: Partial< Record > = { - experimental: 'Experiments', - field_recon: 'Missions', - hybrid_simulation: 'Hybrid Simulations', - simulation: 'Simulations', + experimental: 'Experiment', + field_recon: 'Mission/Documents', + hybrid_simulation: 'Hybrid Simulation', + simulation: 'Simulation', }; switch (projectType) { @@ -51,6 +51,7 @@ const getSteps = ( title: `Proofread ${proofreadStepMapping[projectType]}`, content: ( = ({ projectId, }) => { const [current, setCurrent] = useState(0); - const [, setSearchParams] = useSearchParams(); + const [searchParams, setSearchParams] = useSearchParams(); const { data } = useProjectDetail(projectId); const projectType = data?.baseProject.value.projectType; @@ -172,10 +174,33 @@ export const ProjectPipeline: React.FC<{ projectId: string }> = ({ return getSteps(projectId, projectType, next, prev); }, [projectId, projectType, next, prev]); + const operationName = { + amend: 'Amending', + version: 'Versioning', + publish: 'Publishing', + }[searchParams.get('operation') ?? 
'publish']; + const items = steps.map((item) => ({ key: item.title, title: item.title })); if (!data) return null; return (
    +
    +

    + {operationName} {projectId} +

    + + +   Exit Prepare to + Publish + +
    {steps[current].content}
    diff --git a/client/modules/datafiles/src/projects/ProjectPreview/ProjectPreview.module.css b/client/modules/datafiles/src/projects/ProjectPreview/ProjectPreview.module.css index 67f9b505b5..ed104d5ab2 100644 --- a/client/modules/datafiles/src/projects/ProjectPreview/ProjectPreview.module.css +++ b/client/modules/datafiles/src/projects/ProjectPreview/ProjectPreview.module.css @@ -19,3 +19,7 @@ background-color: rgba(0, 0, 0, 0.02); border: 1px solid #d9d9d9; } + +.yellow-highlight { + background-color: #ece4bf; +} diff --git a/client/modules/datafiles/src/projects/ProjectPreview/ProjectPreview.tsx b/client/modules/datafiles/src/projects/ProjectPreview/ProjectPreview.tsx index 80f7dc2290..c6eda1034e 100644 --- a/client/modules/datafiles/src/projects/ProjectPreview/ProjectPreview.tsx +++ b/client/modules/datafiles/src/projects/ProjectPreview/ProjectPreview.tsx @@ -1,78 +1,169 @@ import React, { useEffect, useMemo, useState } from 'react'; import { + apiClient, + DoiContextProvider, + TFileListing, TPreviewTreeData, + useDataciteMetrics, + useDoiContext, useProjectPreview, usePublicationDetail, usePublicationVersions, useSelectedFiles, } from '@client/hooks'; -import { Button, Collapse } from 'antd'; +import { Alert, Button, Collapse, Tag } from 'antd'; import styles from './ProjectPreview.module.css'; import { DISPLAY_NAMES, PROJECT_COLORS } from '../constants'; import { ProjectCollapse } from '../ProjectCollapser/ProjectCollapser'; import { ProjectCitation, PublishedCitation, + DownloadCitation, } from '../ProjectCitation/ProjectCitation'; import { FileListingTable, + FileTypeIcon, TFileListingColumns, -} from '../../FileListing/FileListingTable/FileListingTable'; -import { NavLink } from 'react-router-dom'; +} from '@client/common-components'; +import { Link, useParams } from 'react-router-dom'; import { PublishedEntityDetails } from '../PublishedEntityDetails'; +import { PreviewModalBody } from '../../DatafilesModal/PreviewModal'; +import { SubEntityDetails } 
from '../SubEntityDetails'; +import { PipelineEditCategoryModal } from '../modals'; -const columns: TFileListingColumns = [ - { - title: 'File Name', - dataIndex: 'name', - ellipsis: true, - render: (data, record) => - record.type === 'dir' ? ( - - -    - - {data} - - ) : ( - +export const EntityFileListingTable: React.FC<{ + treeData: TPreviewTreeData; + preview?: boolean; +}> = ({ treeData, preview }) => { + const [previewModalState, setPreviewModalState] = useState<{ + isOpen: boolean; + path?: string; + selectedFile?: TFileListing; + }>({ isOpen: false }); + + const doi = useDoiContext(); + const columns: TFileListingColumns = [ + { + title: 'File Name', + dataIndex: 'name', + ellipsis: true, + render: (data, record) => ( +
    + {record.type === 'dir' ? ( + + +    + + {data} + + ) : ( + <> + +    + + + )} +
    + {treeData.value.fileTags + .filter((t) => t.path === record.path) + .map((t) => ( + + {t.tagName} + + ))} +
    +
    ), - }, -]; + }, + ]; + return ( + <> + + {previewModalState.path && previewModalState.selectedFile && ( + setPreviewModalState({ isOpen: false })} + /> + )} + + ); +}; function RecursiveTree({ treeData, + preview, defaultOpen = false, + showEditCategories = false, }: { treeData: TPreviewTreeData; defaultOpen?: boolean; + preview?: boolean; + showEditCategories?: boolean; }) { + const { projectId } = useParams(); return (
  • + {showEditCategories && ( +
    + + {({ onClick }) => ( + + )} + +
    + )} - {treeData.value.description} - + +
      {(treeData.children ?? []).map((child) => ( @@ -88,7 +179,12 @@ function RecursiveTree({ > - + ))}
    @@ -103,6 +199,7 @@ export const PublishedEntityDisplay: React.FC<{ treeData: TPreviewTreeData; defaultOpen?: boolean; defaultOpenChildren?: boolean; + showEditCategories?: boolean; }> = ({ projectId, preview, @@ -110,19 +207,53 @@ export const PublishedEntityDisplay: React.FC<{ license, defaultOpen = false, defaultOpenChildren = false, + showEditCategories = false, }) => { const [active, setActive] = useState(defaultOpen); const sortedChildren = useMemo( () => [...(treeData.children ?? [])].sort((a, b) => a.order - b.order), [treeData] ); + + const dois = + treeData.value.dois && treeData.value.dois.length > 0 + ? treeData.value.dois[0] + : ''; + const { data: citationMetrics } = useDataciteMetrics(dois, !preview); + + useEffect(() => { + if (active && !preview) { + const identifier = dois ?? treeData.uuid; + const path = `${projectId}/${treeData.name}/${identifier}`; + apiClient.get( + `/api/datafiles/agave/public/logentity/designsafe.storage.published/${path}`, + { params: { doi: dois } } + ); + } + }, [active, preview, dois, projectId, treeData.name, treeData.uuid]); + return (
    - {DISPLAY_NAMES[treeData.name]} | {treeData.value.title} + + {DISPLAY_NAMES[treeData.name]} |{' '} + {treeData.value.title} + + {preview && + ((treeData.value.dois?.length ?? 0) > 0 ? ( + Published + ) : ( + Unpublished + ))}
    )} +
    + {citationMetrics && ( +
    + +
    + )}
    null} @@ -171,21 +312,18 @@ export const PublishedEntityDisplay: React.FC<{ publicationDate={treeData.publicationDate} /> {(treeData.value.fileObjs?.length ?? 0) > 0 && ( - )} {(sortedChildren ?? []).map((child) => ( ))} @@ -209,6 +347,21 @@ export const ProjectPreview: React.FC<{ projectId: string }> = ({ ); if (!data) return null; + if (!sortedChildren.length) { + return ( + + No publishable collections were found for this project. You can add + a new collection under the "Curation Directory" tab. + + } + > + ); + } + return (
    {sortedChildren @@ -253,13 +406,15 @@ export const PublicationView: React.FC<{ child.name !== 'designsafe.project' ) .map((child, idx) => ( - + + + ))}
    ); diff --git a/client/modules/datafiles/src/projects/ProjectTitleHeader/ProjectTitleHeader.tsx b/client/modules/datafiles/src/projects/ProjectTitleHeader/ProjectTitleHeader.tsx index afc37943f3..36354a476f 100644 --- a/client/modules/datafiles/src/projects/ProjectTitleHeader/ProjectTitleHeader.tsx +++ b/client/modules/datafiles/src/projects/ProjectTitleHeader/ProjectTitleHeader.tsx @@ -17,6 +17,7 @@ export const ProjectTitleHeader: React.FC<{ projectId: string }> = ({ {baseProject.value.projectId} |{' '} {baseProject.value.title} + {({ onClick }) => ( + )} + + + {({ onClick }) => ( + + )} + + + ); + case 'experimental': + return ( + + {({ onClick }) => ( + + )} + + ); + case 'simulation': + return ( + + {({ onClick }) => ( + + )} + + ); + case 'hybrid_simulation': + return ( + + {({ onClick }) => ( + + )} + + ); + default: + return ( + + ); + } +}; diff --git a/client/modules/datafiles/src/projects/PublishedEntityDetails.tsx b/client/modules/datafiles/src/projects/PublishedEntityDetails.tsx index 88d2eb20ec..3ae09b33d4 100644 --- a/client/modules/datafiles/src/projects/PublishedEntityDetails.tsx +++ b/client/modules/datafiles/src/projects/PublishedEntityDetails.tsx @@ -56,7 +56,7 @@ export const PublishedEntityDetails: React.FC<{ {(entityValue.authors ?? []).length > 0 && ( - Authors + Author(s) {entityValue.authors ?.filter((a) => a.authorship !== false) @@ -76,7 +76,7 @@ export const PublishedEntityDetails: React.FC<{ {entityValue.facility && ( - Experiment Type + Facility {entityValue.facility?.name} @@ -173,14 +173,14 @@ export const PublishedEntityDetails: React.FC<{ )} - {publicationDate && ( - - Date Published - - {new Date(publicationDate).toISOString().split('T')[0]} - - - )} + + Date Published + + {publicationDate + ? 
new Date(publicationDate).toISOString().split('T')[0] + : '(Appears after publication)'} + + {entityValue.dois && entityValue.dois[0] && ( diff --git a/client/modules/datafiles/src/projects/SubEntityDetails.tsx b/client/modules/datafiles/src/projects/SubEntityDetails.tsx new file mode 100644 index 0000000000..39ab4a4b95 --- /dev/null +++ b/client/modules/datafiles/src/projects/SubEntityDetails.tsx @@ -0,0 +1,166 @@ +import React from 'react'; +import { TEntityValue } from '@client/hooks'; + +import styles from './BaseProjectDetails.module.css'; +import { DescriptionExpander, UsernamePopover } from './BaseProjectDetails'; + +export const SubEntityDetails: React.FC<{ + entityValue: TEntityValue; +}> = ({ entityValue }) => { + return ( +
    + + + + + + + {entityValue.event && ( + + + + + )} + + {entityValue.observationTypes && ( + + + + + )} + + {entityValue.unit && ( + + + + + )} + + {entityValue.modes && entityValue.modes.length > 0 && ( + + + + + )} + + {entityValue.sampleApproach && + entityValue.sampleApproach.length > 0 && ( + + + + + )} + + {entityValue.sampleSize && ( + + + + + )} + + {entityValue.dateStart && ( + + + + + )} + + {entityValue.simulationType && ( + + + + + )} + + {(entityValue.dataCollectors ?? []).length > 0 && ( + + + + + )} + + {entityValue.equipment && entityValue.equipment.length > 0 && ( + + + + + )} + + {entityValue.location && ( + + + + + )} + + {entityValue.restriction && ( + + + + + )} + +
    Event{entityValue.event}
    Observation Type(s) + {entityValue.observationTypes.map((t) => ( +
    {t.name}
    + ))} +
    Unit of Analysis + {entityValue.unit} +
    Mode(s) of Collection + {entityValue.modes.map((mode) => ( +
    {mode}
    + ))} +
    Sampling Approach(es) + {entityValue.sampleApproach.map((approach) => ( +
    {approach}
    + ))} +
    Sample Size + {entityValue.sampleSize} +
    Date(s) of Collection + {new Date(entityValue.dateStart).toISOString().split('T')[0]} + {entityValue.dateEnd && ( + + {' ― '} + {new Date(entityValue.dateEnd).toISOString().split('T')[0]} + + )} +
    Simulation Type + {entityValue.simulationType?.name} +
    Data Collectors + {entityValue.dataCollectors + ?.filter((a) => a.authorship !== false) + .map((u, i) => ( + + + {i !== + (entityValue.dataCollectors?.filter( + (a) => a.authorship !== false + ).length ?? 0) - + 1 && '; '} + + ))} +
    Equipment + {entityValue.equipment.map((t) => ( +
    {t.name}
    + ))} +
    Site Location + {entityValue.location} |{' '} + + Lat {entityValue.latitude} long {entityValue.longitude} + +
    Restriction + {entityValue.restriction} +
    + + Description: + {entityValue.description || '(N/A)'} + +
    + ); +}; diff --git a/client/modules/datafiles/src/projects/constants.ts b/client/modules/datafiles/src/projects/constants.ts index a14750a856..bda51640bd 100644 --- a/client/modules/datafiles/src/projects/constants.ts +++ b/client/modules/datafiles/src/projects/constants.ts @@ -167,7 +167,7 @@ export const DISPLAY_NAMES: Record = { // Experimental [EXPERIMENT]: 'Experiment', [EXPERIMENT_MODEL_CONFIG]: 'Model Configuration', - [EXPERIMENT_SENSOR]: 'Sensor', + [EXPERIMENT_SENSOR]: 'Sensor Information', [EXPERIMENT_ANALYSIS]: 'Analysis', [EXPERIMENT_EVENT]: 'Event', [EXPERIMENT_REPORT]: 'Report', @@ -183,7 +183,7 @@ export const DISPLAY_NAMES: Record = { [HYBRID_SIM_REPORT]: 'Report', [HYBRID_SIM_ANALYSIS]: 'Analysis', [HYBRID_SIM_GLOBAL_MODEL]: 'Global Model', - [HYBRID_SIM_COORDINATOR]: 'Simulation Coordinator', + [HYBRID_SIM_COORDINATOR]: 'Master Simulation Coordinator', [HYBRID_SIM_SIM_SUBSTRUCTURE]: 'Simulation Substructure', [HYBRID_SIM_EXP_SUBSTRUCTURE]: 'Experimental Substructure', [HYBRID_SIM_EXP_OUTPUT]: 'Experimental Output', diff --git a/client/modules/datafiles/src/projects/forms/BaseProjectForm.tsx b/client/modules/datafiles/src/projects/forms/BaseProjectForm.tsx index e8dc54565a..bd576bcbf8 100644 --- a/client/modules/datafiles/src/projects/forms/BaseProjectForm.tsx +++ b/client/modules/datafiles/src/projects/forms/BaseProjectForm.tsx @@ -1,5 +1,5 @@ -import { Button, Form, Input, Select } from 'antd'; -import React, { useCallback, useEffect, useMemo } from 'react'; +import { Alert, Button, Form, Input, Popconfirm, Select } from 'antd'; +import React, { useCallback, useEffect, useMemo, useState } from 'react'; import { nhTypeOptions, facilityOptions, @@ -16,10 +16,13 @@ import { ReferencedDataInput, } from './_fields'; import { TProjectUser } from './_fields/UserSelect'; -import { TBaseProjectValue, useProjectDetail } from '@client/hooks'; +import { + TBaseProjectValue, + useAuthenticatedUser, + useProjectDetail, +} from '@client/hooks'; import { 
customRequiredMark } from './_common'; import { AuthorSelect } from './_fields/AuthorSelect'; -import { ChangeProjectTypeModal } from '../modals'; import { ProjectTypeRadioSelect } from '../modals/ProjectTypeRadioSelect'; export const ProjectTypeInput: React.FC<{ @@ -80,11 +83,18 @@ export const ProjectTypeInput: React.FC<{ export const BaseProjectForm: React.FC<{ projectId: string; - onChangeType?: () => void; -}> = ({ projectId, onChangeType }) => { + projectType?: string; + onSubmit: (patchMetadata: Record) => void; + changeTypeModal?: React.ReactElement; +}> = ({ projectId, projectType, onSubmit, changeTypeModal }) => { const [form] = Form.useForm(); const { data } = useProjectDetail(projectId ?? ''); - const projectType = data?.baseProject.value.projectType; + + const [hasValidationErrors, setHasValidationErrors] = useState(false); + + if (!projectType) { + projectType = data?.baseProject.value.projectType; + } function processFormData(formData: Record) { const { pi, coPis, teamMembers, guestMembers, ...rest } = formData; @@ -113,29 +123,46 @@ export const BaseProjectForm: React.FC<{ }; } - const watchedValues = Form.useWatch([], form); + const watchedPi = Form.useWatch(['pi'], form); + const watchedCoPis = Form.useWatch(['coPis'], form); + const watchedMembers = Form.useWatch(['teamMembers'], form); + const watchedGuestMembers = Form.useWatch(['guestMembers'], form); const watchedUsers = useMemo( () => [ - ...(watchedValues?.pi ?? []), - ...(watchedValues?.coPis ?? []), - ...(watchedValues?.teamMembers ?? []), - ...(watchedValues?.guestMembers ?? []), + ...(watchedPi ?? []), + ...(watchedCoPis ?? []), + ...(watchedMembers ?? []), + ...(watchedGuestMembers?.filter( + (f: TProjectUser) => !!f && f.fname && f.lname && f.email && f.inst + ) ?? 
[]), ], - [ - watchedValues?.pi, - watchedValues?.coPis, - watchedValues?.teamMembers, - watchedValues?.guestMembers, - ] + [watchedPi, watchedCoPis, watchedMembers, watchedGuestMembers] ); + const { user } = useAuthenticatedUser(); + const [showConfirm, setShowConfirm] = useState(false); + const onFormSubmit = ( + v: Record & { users: TProjectUser[] } + ) => { + setHasValidationErrors(false); + const currentUserInProject = v.users.find( + (u) => u.username === user?.username + ); + if (!currentUserInProject && !showConfirm) { + setShowConfirm(true); + } else { + onSubmit(v); + } + }; + if (!data) return
    Loading
    ; return (
    console.log(processFormData(v))} - onFinishFailed={(v) => console.log(processFormData(v.values))} + onFinish={(v) => onFormSubmit(processFormData(v))} + onFinishFailed={() => setHasValidationErrors(true)} requiredMark={customRequiredMark} > @@ -143,7 +170,12 @@ export const BaseProjectForm: React.FC<{ system, and research approach. Define all acronyms. @@ -151,62 +183,73 @@ export const BaseProjectForm: React.FC<{ {/*TODO: disable in situations where project type shouldn't be changed.*/} - - - - {({ onClick }) => ( - - )} - - + {changeTypeModal && ( + + + {changeTypeModal} + + )} {projectType === 'field_recon' && ( - Specify the Field Research being performed. + Specify the Field Research being performed. Enter a custom value by + typing it into the field and pressing "return". )} - - Specify the natural hazard being researched. - - + {projectType !== 'None' && ( + + Specify the natural hazard being researched. Enter a custom value by + typing it into the field and pressing "return". + + + - + )} {projectType === 'other' && ( <> - The nature or genre of the content. + The nature or genre of the content. Enter a custom value by typing + it into the field and pressing "return". - Specify the facilities involved in this research. + Specify the facilities involved in this research. Enter a custom + value by typing it into the field and pressing "return". - + These users can view, edit, curate, and publish. Include Co-PI(s). + Users can be looked up using their exact username{' '} + only. - -   + +
    +
    @@ -238,7 +298,8 @@ export const BaseProjectForm: React.FC<{ - These users can view, edit, curate, and publish. + These users can view, edit, curate, and publish. Users can be looked up + using their exact username only. - + You can order the authors during the publication process. - + - - Recommended for funded projects. - - - Published data used in the creation of this dataset. @@ -276,41 +341,107 @@ export const BaseProjectForm: React.FC<{ Information giving context, a linked dataset on DesignSafe, or works citing the DOI for this dataset. - + )} + + Recommended for funded projects. + + + Details related to specific events such as natural hazards (ex. Hurricane Katrina). - - Choose informative words that indicate the content of the project. - - + {projectType !== 'None' && ( + + Choose informative words that indicate the content of the project. + Keywords should be comma-separated. + + + - - + )} What is this project about? How can data in this project be reused? How is this project unique? Who is the audience? Description must be between 50 and 5000 characters in length. - + {hasValidationErrors && ( + + One or more fields could not be validated. Please check the form + for errors. + + } + /> + )} - + + If you save this project without adding yourself as a principal + investigator +
    or team member, you will lose access to the project and its + files. + + } + open={showConfirm} + okText="Proceed" + placement="topRight" + afterOpenChange={(isOpen) => { + if (isOpen) { + // Focus on opening so that the popover is accessible via keyboard + document.getElementById('prj-confirm-cancel')?.focus(); + } + }} + cancelButtonProps={{ id: 'prj-confirm-cancel' }} + onOpenChange={(newVal) => { + if (!newVal) setShowConfirm(newVal); + }} + onConfirm={() => onSubmit(processFormData(form.getFieldsValue()))} + > + +
    ); diff --git a/client/modules/datafiles/src/projects/forms/CreateProjectForm.tsx b/client/modules/datafiles/src/projects/forms/CreateProjectForm.tsx new file mode 100644 index 0000000000..4a7ac685df --- /dev/null +++ b/client/modules/datafiles/src/projects/forms/CreateProjectForm.tsx @@ -0,0 +1,152 @@ +import { Button, Form, Input } from 'antd'; +import React, { useEffect } from 'react'; + +import { UserSelect, GuestMembersInput } from './_fields'; +import { TProjectUser } from './_fields/UserSelect'; +import { customRequiredMark } from './_common'; +import { useAuthenticatedUser } from '@client/hooks'; + +export const BaseProjectCreateForm: React.FC<{ + onSubmit: (value: Record) => void; +}> = ({ onSubmit }) => { + const [form] = Form.useForm(); + + function processFormData(formData: Record) { + const { pi, coPis, teamMembers, guestMembers, ...rest } = formData; + return { + ...rest, + users: [...pi, ...coPis, ...teamMembers, ...guestMembers], + }; + } + const { user } = useAuthenticatedUser(); + + /* pre-populate form with logged-in user as PI. */ + useEffect(() => { + form.setFieldValue('pi', [ + { + fname: user?.firstName, + lname: user?.lastName, + username: user?.username, + email: user?.email, + inst: user?.institution, + role: 'pi', + }, + ]); + }, [form, user]); + + if (!user) return null; + return ( +
    { + onSubmit(processFormData(v)); + form.resetFields(); + }} + onFinishFailed={(v) => console.log(processFormData(v.values))} + requiredMark={customRequiredMark} + > + + Incorporate the project's focus with words indicating the hazard, model, + system, and research approach. Define all acronyms. + + + + + +
    + + These users can view, edit, curate, and publish. Include Co-PI(s). + Users can be looked up using their exact username{' '} + only. + + + + + +
    +
    + + + +
    +
    + + + These users can view, edit, curate, and publish. + + + + + + + Add members without a DesignSafe account. These names can be selected as + authors during the publication process. + + + + + What is this project about? How can data in this project be reused? How + is this project unique? Who is the audience? Description must be between + 50 and 5000 characters in length. + + + + + + + + +
    + ); +}; diff --git a/client/modules/datafiles/src/projects/forms/ProjectCategoryForm.tsx b/client/modules/datafiles/src/projects/forms/ProjectCategoryForm.tsx index 997a43e08f..d33f1e084b 100644 --- a/client/modules/datafiles/src/projects/forms/ProjectCategoryForm.tsx +++ b/client/modules/datafiles/src/projects/forms/ProjectCategoryForm.tsx @@ -1,56 +1,35 @@ -import { Form, Input, Button, Select, Checkbox } from 'antd'; -import React, { useCallback, useEffect, useState } from 'react'; +import { Form, Input, Button, Select, Alert } from 'antd'; +import React, { useEffect, useMemo, useState } from 'react'; import { equipmentOptions, observationTypeOptions, } from './ProjectFormDropdowns'; //import { TProjectUser } from './_fields/UserSelect'; -import { - TBaseProjectValue, - TProjectUser, - useProjectDetail, -} from '@client/hooks'; +import { TBaseProjectValue, useProjectDetail } from '@client/hooks'; import { customRequiredMark } from './_common'; import { CATEGORIES_BY_PROJECT_TYPE, DISPLAY_NAMES } from '../constants'; import * as constants from '../constants'; import { DateInput, DropdownSelect, SampleApproachInput } from './_fields'; import { CollectionModeInput } from './_fields/CollectionModeInput'; - -const AuthorSelect: React.FC<{ - projectUsers: TProjectUser[]; - value?: TProjectUser[]; - onChange?: (value: TProjectUser[]) => void; -}> = ({ value, onChange, projectUsers }) => { - const options = projectUsers.map((author) => ({ - value: JSON.stringify(author), - label: `${author.fname} ${author.lname} (${author.email})`, - })); - - const onChangeCallback = useCallback( - (value: string[]) => { - if (onChange) onChange(value.map((a) => JSON.parse(a))); - }, - [onChange] - ); - - return ( - value?.some((v) => user.email === v.email)) - .map((v) => JSON.stringify(v) ?? 
[])} - options={options} - onChange={onChangeCallback} - /> - ); -}; +import { AuthorSelect } from './_fields/AuthorSelect'; +import { ProjectCategoryFormHelp } from './ProjectCategoryFormHelp'; export const ProjectCategoryForm: React.FC<{ projectType: TBaseProjectValue['projectType']; projectId: string; entityUuid?: string; mode: 'create' | 'edit'; -}> = ({ projectType, projectId, entityUuid, mode = 'edit' }) => { + onSubmit: CallableFunction; + onCancelEdit: CallableFunction; +}> = ({ + projectType, + projectId, + entityUuid, + mode = 'edit', + onSubmit, + onCancelEdit, +}) => { const [form] = Form.useForm(); const { data } = useProjectDetail(projectId ?? ''); const [selectedName, setSelectedName] = useState( @@ -63,38 +42,52 @@ export const ProjectCategoryForm: React.FC<{ label: DISPLAY_NAMES[name], })); - const category = data?.entities.find((e) => e.uuid === entityUuid); + const category = useMemo( + () => data?.entities.find((e) => e.uuid === entityUuid), + [data, entityUuid] + ); + + const [hasValidationErrors, setHasValidationErrors] = useState(false); - const setValues = useCallback(() => { + // Set initial form values + useEffect(() => { if (data && category && mode === 'edit') { form.setFieldsValue({ value: category.value }); setSelectedName(category.name); } - }, [data, form, category, mode]); - useEffect(() => setValues(), [setValues, projectId, category?.uuid]); + setHasValidationErrors(false); + }, [projectId, category, data, form, mode]); - if (!data) return
    Loading
    ; + if (!data) return null; return (
    setSelectedName(v.name)} + onValuesChange={(_, v) => mode === 'create' && setSelectedName(v.name)} layout="vertical" - onFinish={(v) => console.log(v)} + onFinish={(v) => { + onSubmit(v); + form.resetFields(); + setSelectedName(undefined); + onCancelEdit(); + setHasValidationErrors(false); + }} + onFinishFailed={() => setHasValidationErrors(true)} requiredMark={customRequiredMark} > {mode === 'create' && ( - Model Configuration Files describing the design and layout of what is - being tested (some call this a specimen). Sensor Information Files - about the sensor instrumentation used in a model configuration to - conduct one or more event. Event Files from unique occurrences during - which data are generated. Analysis Tables, graphs, visualizations, - Jupyter Notebooks, or other representations of the results. Report - Written accounts made to convey information about an entire project or - experiment. +
    + +
    @@ -114,11 +112,17 @@ export const ProjectCategoryForm: React.FC<{ {selectedName === constants.FIELD_RECON_PLANNING && ( - + Select data collectors for this collection. @@ -128,11 +132,17 @@ export const ProjectCategoryForm: React.FC<{ {selectedName === constants.FIELD_RECON_GEOSCIENCE && ( <> - The nature or subject of the data collected. + The nature or subject of the data collected. Enter a custom value by + typing it into the field and pressing "return". @@ -144,7 +154,12 @@ export const ProjectCategoryForm: React.FC<{
    @@ -159,11 +174,17 @@ export const ProjectCategoryForm: React.FC<{
    - + Select data collectors for this collection. @@ -196,11 +217,18 @@ export const ProjectCategoryForm: React.FC<{ - The equipment used to gather your data. + The equipment used to gather your data. Enter a custom value by + typing it into the field and pressing "return". @@ -240,7 +268,12 @@ export const ProjectCategoryForm: React.FC<{
    @@ -255,11 +288,17 @@ export const ProjectCategoryForm: React.FC<{
    - + Select data collectors for this collection. @@ -295,11 +334,18 @@ export const ProjectCategoryForm: React.FC<{ - The equipment used to gather your data. + The equipment used to gather your data. Enter a custom value by + typing it into the field and pressing "return". @@ -318,25 +364,64 @@ export const ProjectCategoryForm: React.FC<{ )} - + Summarize the purpose of the category and its files. What is it about? What are its features? Description must be between 50 and 5000 characters in length. + {hasValidationErrors && ( + + One or more fields could not be validated. Please check the form + for errors. + + } + /> + )} + + {mode === 'edit' && ( + + )} + )} + + ); +}; export const NeesDetailLayout: React.FC = () => { + const { neesid } = useParams(); + if (!neesid) return null; + const nees = neesid?.split('.')[0]; + return ( -
    - Placeholder for the NEES detail view. -
    + +
    + } /> + +
    +
    ); }; diff --git a/client/src/datafiles/layouts/nees/NeesListingLayout.tsx b/client/src/datafiles/layouts/nees/NeesListingLayout.tsx index f80c3777fb..aa0e21494f 100644 --- a/client/src/datafiles/layouts/nees/NeesListingLayout.tsx +++ b/client/src/datafiles/layouts/nees/NeesListingLayout.tsx @@ -1,11 +1,42 @@ import React from 'react'; -import { Layout } from 'antd'; -import { NeesListing } from '@client/datafiles'; +import { Button, Form, Input, Layout } from 'antd'; +import { DatafilesToolbar, NeesListing } from '@client/datafiles'; +import { useSearchParams } from 'react-router-dom'; + +const NeesListingSearchbar = () => { + const [searchParams, setSearchParams] = useSearchParams(); + const onSubmit = (queryString: string) => { + const newSearchParams = searchParams; + if (queryString) { + newSearchParams.set('q', queryString); + } else { + newSearchParams.delete('q'); + } + + setSearchParams(newSearchParams); + }; + return ( +
    onSubmit(data.query)} + style={{ display: 'inline-flex' }} + > + + + + +
    + ); +}; export const NEESListingLayout: React.FC = () => { return ( -
    Placeholder for the NEES Search.
    + } />
    diff --git a/client/src/datafiles/layouts/projects/ProjectCurationLayout.tsx b/client/src/datafiles/layouts/projects/ProjectCurationLayout.tsx index 8b0b06f4a8..d98e3aebf4 100644 --- a/client/src/datafiles/layouts/projects/ProjectCurationLayout.tsx +++ b/client/src/datafiles/layouts/projects/ProjectCurationLayout.tsx @@ -1,11 +1,11 @@ import { - DatafilesBreadcrumb, ManageCategoryModal, ManagePublishableEntityModal, ProjectCurationFileListing, ProjectNavbar, RelateDataModal, } from '@client/datafiles'; +import { DatafilesBreadcrumb } from '@client/common-components'; import { useProjectDetail } from '@client/hooks'; import { Button } from 'antd'; @@ -130,44 +130,55 @@ export const ProjectCurationLayout: React.FC = () => { if (!data) return
    loading...
    ; return (
    -
    +
    - - 1 | 2 | - - {({ onClick }) => ( - - )} - {' '} - 3 | - - {({ onClick }) => ( - - )} - - + {data.baseProject.value.projectType !== 'other' && ( + + 1 | 2 | + + {({ onClick }) => ( + + )} + {' '} + 3 | + + {({ onClick }) => ( + + )} + + + )}
    { const [searchParams, setSearchParams] = useSearchParams(); @@ -36,16 +36,48 @@ const FileListingSearchBar = () => { }; export const ProjectDetailLayout: React.FC = () => { + const { user } = useAuthenticatedUser(); const { projectId } = useParams(); - const { data } = useProjectDetail(projectId ?? ''); - if (!data || !projectId) return
    loading...
    ; + const { data, isError } = useProjectDetail(projectId ?? ''); + if (isError) { + return ( + + + + ); + } + + if (!user) + return ( + + } /> + + + ); + + if (!data || !projectId) + return ( + + + + ); return ( -
    + } /> -
    + ); }; diff --git a/client/src/datafiles/layouts/projects/ProjectPipelineSelectLayout.tsx b/client/src/datafiles/layouts/projects/ProjectPipelineSelectLayout.tsx index 14e79732ed..98589e7bf0 100644 --- a/client/src/datafiles/layouts/projects/ProjectPipelineSelectLayout.tsx +++ b/client/src/datafiles/layouts/projects/ProjectPipelineSelectLayout.tsx @@ -37,18 +37,21 @@ export const ProjectPipelineSelectLayout: React.FC = () => {
    • Publish new dataset(s) in your project.
    • - If you need to publish subsequent dataset(s), + If you need to publish subsequent dataset(s),  submit a ticket - + {' '} with your project number and the name of the dataset(s).
    - +
  • +
  • + Change the metadata in the curation directory before this + step. +
  • - + +

    Versioning

    @@ -108,22 +119,6 @@ export const ProjectPipelineSelectLayout: React.FC = () => {
    -
    -

    Add/Remove Authors

    -
    -
    - If you need to add or remove authors to/from a publication, - - submit a ticket - - . -
    -
    -
    diff --git a/client/src/datafiles/layouts/projects/ProjectPreviewLayout.tsx b/client/src/datafiles/layouts/projects/ProjectPreviewLayout.tsx index c64d00c21e..141e76a00d 100644 --- a/client/src/datafiles/layouts/projects/ProjectPreviewLayout.tsx +++ b/client/src/datafiles/layouts/projects/ProjectPreviewLayout.tsx @@ -1,13 +1,14 @@ import { BaseProjectDetails, + ProjectBestPracticesModal, ProjectNavbar, ProjectPreview, ProjectTitleHeader, } from '@client/datafiles'; import { useProjectDetail } from '@client/hooks'; -import { Button } from 'antd'; +import { Alert } from 'antd'; import React from 'react'; -import { NavLink, useParams } from 'react-router-dom'; +import { useParams } from 'react-router-dom'; export const ProjectPreviewLayout: React.FC = () => { const { projectId } = useParams(); @@ -16,7 +17,7 @@ export const ProjectPreviewLayout: React.FC = () => { if (!projectId) return null; if (!data) return null; return ( -
    +
    { }} > - - - +
    + {data.baseProject.value.projectType === 'other' && ( + + You will select the data to be published in the next step. + + } + /> + )}
    ); }; diff --git a/client/src/datafiles/layouts/projects/ProjectWorkdirLayout.tsx b/client/src/datafiles/layouts/projects/ProjectWorkdirLayout.tsx index 0fd579cca7..0c8805640e 100644 --- a/client/src/datafiles/layouts/projects/ProjectWorkdirLayout.tsx +++ b/client/src/datafiles/layouts/projects/ProjectWorkdirLayout.tsx @@ -1,22 +1,69 @@ import React from 'react'; import { Link, useParams } from 'react-router-dom'; import { - DatafilesBreadcrumb, + ChangeProjectTypeModal, + EmptyProjectFileListing, FileListing, + ProjectDataTransferModal, ProjectNavbar, } from '@client/datafiles'; +import { DatafilesBreadcrumb } from '@client/common-components'; import { useProjectDetail } from '@client/hooks'; +import { Alert, Button } from 'antd'; export const ProjectWorkdirLayout: React.FC = () => { const { projectId, path } = useParams(); const { data } = useProjectDetail(projectId ?? ''); if (!projectId) return null; if (!data) return
    loading...
    ; + + const changeTypeModal = ( + + {({ onClick }) => ( + + )} + + ); + return ( <> - + {data.baseProject.value.projectType === 'None' ? ( +
    + +

    + Please {changeTypeModal} in order to access data curation + features and publish your data set. +

    +

    + +

    +
    + } + /> +
    + ) : ( +
    + + +
    + )} { system={`project-${data.baseProject.uuid}`} path={path ?? ''} scroll={{ y: 500 }} + emptyListingDisplay={} /> )} diff --git a/client/src/datafiles/layouts/published/PublishedDetailLayout.module.css b/client/src/datafiles/layouts/published/PublishedDetailLayout.module.css new file mode 100644 index 0000000000..f47cb1f1b2 --- /dev/null +++ b/client/src/datafiles/layouts/published/PublishedDetailLayout.module.css @@ -0,0 +1,3 @@ +.yellow-highlight { + background-color: #ece4bf; +} diff --git a/client/src/datafiles/layouts/published/PublishedDetailLayout.tsx b/client/src/datafiles/layouts/published/PublishedDetailLayout.tsx index d6ceea8309..61baf5cd00 100644 --- a/client/src/datafiles/layouts/published/PublishedDetailLayout.tsx +++ b/client/src/datafiles/layouts/published/PublishedDetailLayout.tsx @@ -1,7 +1,13 @@ -import { BaseProjectDetails, DatafilesToolbar } from '@client/datafiles'; +import { + BaseProjectDetails, + DatafilesToolbar, + DownloadDatasetModal, + PublishedCitation, + DownloadCitation, +} from '@client/datafiles'; import { usePublicationDetail, usePublicationVersions } from '@client/hooks'; import React, { useEffect } from 'react'; -import { Button, Form, Input } from 'antd'; +import { Alert, Button, Form, Input, Layout, Spin } from 'antd'; import { Navigate, Outlet, useParams, useSearchParams } from 'react-router-dom'; const FileListingSearchBar = () => { @@ -22,7 +28,10 @@ const FileListingSearchBar = () => { style={{ display: 'inline-flex' }} > - +

    @@ -61,4 +61,4 @@

    Register an A {% endblock %} -{% block footer %}{% include 'includes/footer.html' %}{% endblock footer %} \ No newline at end of file +{% block footer %}{% include 'includes/footer.html' %}{% endblock footer %} diff --git a/designsafe/apps/accounts/tests.py b/designsafe/apps/accounts/tests.py index 86f1b0d398..08f775d209 100644 --- a/designsafe/apps/accounts/tests.py +++ b/designsafe/apps/accounts/tests.py @@ -38,7 +38,7 @@ def test_mailing_list_access(self): self.client.login(username='ds_user', password='user/password') resp = self.client.get(url) self.assertEqual(resp.status_code, 403) - self.client.logout() + user = get_user_model().objects.get(pk=2) perm = Permission.objects.get(codename='view_notification_subscribers') user.user_permissions.add(perm) diff --git a/designsafe/apps/accounts/views.py b/designsafe/apps/accounts/views.py index 4e22674053..346fb04764 100644 --- a/designsafe/apps/accounts/views.py +++ b/designsafe/apps/accounts/views.py @@ -11,7 +11,7 @@ from designsafe.apps.accounts import forms, integrations from designsafe.apps.accounts.models import (NEESUser, DesignSafeProfile, NotificationPreferences) -from designsafe.apps.auth.tasks import check_or_create_agave_home_dir +from designsafe.apps.auth.tasks import check_or_configure_system_and_user_directory, get_systems_to_configure from designsafe.apps.accounts.tasks import create_report from pytas.http import TASClient from pytas.models import User as TASUser @@ -279,7 +279,7 @@ def register(request): if not captcha_json.get("success", False): messages.error(request, "Please complete the reCAPTCHA before submitting your account request.") return render(request,'designsafe/apps/accounts/register.html', context) - + # Once captcha is verified, send request to TRAM. 
tram_headers = {"tram-services-key": settings.TRAM_SERVICES_KEY} tram_body = {"project_id": settings.TRAM_PROJECT_ID, @@ -291,7 +291,7 @@ def register(request): tram_resp.raise_for_status() logger.info("Received response from TRAM: %s", tram_resp.json()) messages.success(request, "Your request has been received. Please check your email for a project invitation.") - + except requests.HTTPError as exc: logger.debug(exc) messages.error(request, "An unknown error occurred. Please try again later.") @@ -467,9 +467,12 @@ def email_confirmation(request, code=None): user = tas.get_user(username=username) if tas.verify_user(user['id'], code, password=password): logger.info('TAS Account activation succeeded.') - from django.conf import settings - check_or_create_agave_home_dir.apply_async(args=(user.username, settings.AGAVE_STORAGE_SYSTEM)) - check_or_create_agave_home_dir.apply_async(args=(user.username, settings.AGAVE_WORKING_SYSTEM)) + systems_to_configure = get_systems_to_configure(username) + for system in systems_to_configure: + check_or_configure_system_and_user_directory.apply_async(args=(user.username, + system["system_id"], + system["path"], + system["create_path"])) return HttpResponseRedirect(reverse('designsafe_accounts:manage_profile')) else: messages.error(request, diff --git a/designsafe/apps/api/agave/__init__.py b/designsafe/apps/api/agave/__init__.py index 8e05b4d037..2518a258e8 100644 --- a/designsafe/apps/api/agave/__init__.py +++ b/designsafe/apps/api/agave/__init__.py @@ -6,19 +6,17 @@ import logging import requests from agavepy.agave import Agave, load_resource +from tapipy.tapis import Tapis from django.conf import settings logger = logging.getLogger(__name__) AGAVE_RESOURCES = load_resource(getattr(settings, 'AGAVE_TENANT_BASEURL')) -def get_service_account_client(): +def get_service_account_client_v2(): """Return service account agave client. - This service account should use 'ds_admin' token. 
- ..note:: This service account is an admin account on the Agave tenant. - ..todo:: Should we, instead, use `ds_user`? There might be some issues because of permissionas, but it might be a bit safer.""" @@ -27,11 +25,32 @@ def get_service_account_client(): token=settings.AGAVE_SUPER_TOKEN, resources=AGAVE_RESOURCES) + +def get_service_account_client(): + """Return service account tapis client. + + This service account uses 'wma_prtl' token. + """ + + return Tapis( + base_url=settings.TAPIS_TENANT_BASEURL, + access_token=settings.TAPIS_ADMIN_JWT) + + +def get_tg458981_client(): + """Return tg458981 tapis client.""" + + return Tapis( + base_url=settings.TAPIS_TENANT_BASEURL, + access_token=settings.TAPIS_TG458981_JWT) + + +# TODOV3: Remove sandbox account code def get_sandbox_service_account_client(): """Return sandbox service account""" - return Agave(api_server=settings.AGAVE_SANDBOX_TENANT_BASEURL, - token=settings.AGAVE_SANDBOX_SUPER_TOKEN, - resources=AGAVE_RESOURCES) + return Tapis( + base_url=settings.TAPIS_TENANT_BASEURL, + access_token=settings.TAPIS_ADMIN_JWT) def service_account(): """Return prod or sandbox service client depending on setting.AGAVE_USE_SANDBOX""" diff --git a/designsafe/apps/api/datafiles/handlers.py b/designsafe/apps/api/datafiles/handlers.py index fd67122694..1e4029ab24 100644 --- a/designsafe/apps/api/datafiles/handlers.py +++ b/designsafe/apps/api/datafiles/handlers.py @@ -1,12 +1,13 @@ import logging from designsafe.apps.api.datafiles.notifications import notify -from designsafe.apps.api.datafiles.operations import agave_operations +from designsafe.apps.api.datafiles.operations import tapis_operations from designsafe.apps.api.datafiles.operations import googledrive_operations from designsafe.apps.api.datafiles.operations import dropbox_operations from designsafe.apps.api.datafiles.operations import box_operations from designsafe.apps.api.datafiles.operations import shared_operations from designsafe.apps.api.exceptions import 
ApiException from django.core.exceptions import PermissionDenied +from tapipy.errors import BaseTapyException from django.urls import reverse logger = logging.getLogger(__name__) @@ -20,8 +21,8 @@ notify_actions = ['move', 'copy', 'rename', 'trash', 'mkdir', 'upload'] operations_mapping = { - 'agave': agave_operations, - 'tapis': agave_operations, + 'agave': tapis_operations, + 'tapis': tapis_operations, 'googledrive': googledrive_operations, 'box': box_operations, 'dropbox': dropbox_operations, @@ -34,7 +35,10 @@ def datafiles_get_handler(api, client, scheme, system, path, operation, username raise PermissionDenied op = getattr(operations_mapping[api], operation) - return op(client, system, path, username=username, **kwargs) + try: + return op(client, system, path, username=username, **kwargs) + except BaseTapyException as exc: + raise ApiException(message=exc.message, status=500) from exc def datafiles_post_handler(api, username, client, scheme, system, @@ -65,7 +69,10 @@ def datafiles_put_handler(api, username, client, scheme, system, try: result = op(client, system, path, **body) - operation in notify_actions and notify(username, operation, '{} operation was successful.'.format(operation.capitalize()), 'SUCCESS', result) + if operation == 'copy' and system != body.get('dest_system', None): + notify(username, operation, 'Your file transfer request has been received and will be processed shortly.'.format(operation.capitalize()), 'SUCCESS', result) + else: + operation in notify_actions and notify(username, operation, '{} operation was successful.'.format(operation.capitalize()), 'SUCCESS', result) return result except Exception as exc: operation in notify_actions and notify(username, operation, 'File operation {} could not be completed.'.format(operation.capitalize()), 'ERROR', {}) diff --git a/designsafe/apps/api/datafiles/operations/shared_operations.py b/designsafe/apps/api/datafiles/operations/shared_operations.py index cc5c7bc46b..17a18f664c 100644 --- 
a/designsafe/apps/api/datafiles/operations/shared_operations.py +++ b/designsafe/apps/api/datafiles/operations/shared_operations.py @@ -9,7 +9,7 @@ from elasticsearch_dsl import Q import magic from designsafe.apps.data.models.elasticsearch import IndexedFile -from designsafe.apps.api.datafiles.operations.agave_operations import preview, copy, download, download_bytes, listing as agave_listing +from designsafe.apps.api.datafiles.operations.tapis_operations import preview, copy, download, download_bytes, listing as agave_listing # from portal.libs.elasticsearch.indexes import IndexedFile # from portal.apps.search.tasks import agave_indexer, agave_listing_indexer diff --git a/designsafe/apps/api/datafiles/operations/agave_operations.py b/designsafe/apps/api/datafiles/operations/tapis_operations.py similarity index 65% rename from designsafe/apps/api/datafiles/operations/agave_operations.py rename to designsafe/apps/api/datafiles/operations/tapis_operations.py index 875518c16c..fc5cd6889b 100644 --- a/designsafe/apps/api/datafiles/operations/agave_operations.py +++ b/designsafe/apps/api/datafiles/operations/tapis_operations.py @@ -3,9 +3,13 @@ import logging import os import urllib +from pathlib import Path +import tapipy from designsafe.apps.api.datafiles.utils import * from designsafe.apps.data.models.elasticsearch import IndexedFile from designsafe.apps.data.tasks import agave_indexer, agave_listing_indexer +from designsafe.apps.api.filemeta.models import FileMetaModel +from designsafe.apps.api.filemeta.tasks import move_file_meta_async, copy_file_meta_async from django.conf import settings from elasticsearch_dsl import Q import requests @@ -14,7 +18,7 @@ logger = logging.getLogger(__name__) -def listing(client, system, path, offset=0, limit=100, *args, **kwargs): +def listing(client, system, path, offset=0, limit=100, q=None, *args, **kwargs): """ Perform a Tapis file listing @@ -36,18 +40,32 @@ def listing(client, system, path, offset=0, limit=100, *args, 
**kwargs): list List of dicts containing file metadata """ - raw_listing = client.files.list(systemId=system, - filePath=urllib.parse.quote(path), - offset=int(offset) + 1, - limit=int(limit)) + + if q: + return search(client, system, path, offset=0, limit=100, query_string=q, **kwargs) + + raw_listing = client.files.listFiles(systemId=system, + path=(path or '/'), + offset=int(offset), + limit=int(limit)) try: # Convert file objects to dicts for serialization. - listing = list(map(dict, raw_listing)) + listing = list(map(lambda f: { + 'system': system, + 'type': 'dir' if f.type == 'dir' else 'file', + 'format': 'folder' if f.type == 'dir' else 'raw', + 'mimeType': f.mimeType, + 'path': f"/{f.path}", + 'name': f.name, + 'length': f.size, + 'lastModified': f.lastModified, + '_links': { + 'self': {'href': f.url} + }}, raw_listing)) except IndexError: # Return [] if the listing is empty. listing = [] - # Update Elasticsearch after each listing. # agave_listing_indexer.delay(listing) agave_listing_indexer.delay(listing) @@ -65,13 +83,22 @@ def detail(client, system, path, *args, **kwargs): """ Retrieve the uuid for a file by parsing the query string in _links.metadata.href """ - listing = client.files.list(systemId=system, filePath=urllib.parse.quote(path), offset=0, limit=1) - - href = listing[0]['_links']['metadata']['href'] - qs = urllib.parse.urlparse(href).query - parsed_qs = urllib.parse.parse_qs(qs)['q'][0] - qs_json = json.loads(parsed_qs) - return {**dict(listing[0]), 'uuid': qs_json['associationIds']} + _listing = client.files.listFiles(systemId=system, path=urllib.parse.quote(path), offset=0, limit=1) + f = _listing[0] + listing_res = { + 'system': system, + 'type': 'dir' if f.type == 'dir' else 'file', + 'format': 'folder' if f.type == 'dir' else 'raw', + 'mimeType': f.mimeType, + 'path': f"/{f.path}", + 'name': f.name, + 'length': f.size, + 'lastModified': f.lastModified, + '_links': { + 'self': {'href': f.url} + }} + + return listing_res def 
iterate_listing(client, system, path, limit=100): @@ -163,11 +190,11 @@ def download(client, system, path=None, paths=None, *args, **kwargs): token = None if client is not None: - token = client.token.token_info['access_token'] + token = client.access_token.access_token zip_endpoint = "https://designsafe-download01.tacc.utexas.edu/check" data = json.dumps({'system': system, 'paths': paths}) # data = json.dumps({'system': 'designsafe.storage.published', 'paths': ['PRJ-2889']}) - resp = requests.put(zip_endpoint, headers={"Authorization": f"Bearer {token}"}, data=data) + resp = requests.put(zip_endpoint, headers={"x-tapis-token": token}, data=data) resp.raise_for_status() download_key = resp.json()["key"] return {"href": f"https://designsafe-download01.tacc.utexas.edu/download/{download_key}"} @@ -191,19 +218,15 @@ def mkdir(client, system, path, dir_name): ------- dict """ - body = { - 'action': 'mkdir', - 'path': dir_name - } - result = client.files.manage(systemId=system, - filePath=urllib.parse.quote(path), - body=body) + path_input = str(Path(path) / Path(dir_name)) + client.files.mkdir(systemId=system, path=path_input) + agave_indexer.apply_async(kwargs={'systemId': system, 'filePath': path, 'recurse': False}, queue='indexing') - return dict(result) + return {"result": "OK"} def move(client, src_system, src_path, dest_system, dest_path): @@ -228,42 +251,25 @@ def move(client, src_system, src_path, dest_system, dest_path): dict """ - # do not allow moves to the same location or across systems - if (os.path.dirname(src_path) == dest_path.strip('/') or src_system != dest_system): - return { - 'system': src_system, - 'path': urllib.parse.quote(src_path), - 'name': os.path.basename(src_path) - } + src_filename = Path(src_path).name + dest_path_full = str(Path(dest_path) / src_filename) + + if src_system != dest_system: + raise ValueError("src_system and dest_system must be identical for move.") + client.files.moveCopy(systemId=src_system, + path=src_path, + 
operation="MOVE", + newPath=dest_path_full) + + move_file_meta_async.delay(src_system, src_path, dest_system, dest_path_full) - src_file_name = os.path.basename(src_path) - try: - client.files.list(systemId=dest_system, filePath=os.path.join(dest_path, src_file_name)) - dst_file_name = rename_duplicate_path(src_file_name) - full_dest_path = os.path.join(dest_path.strip('/'), dst_file_name) - except: - dst_file_name = src_file_name - full_dest_path = os.path.join(dest_path.strip('/'), src_file_name) - body = {'action': 'move', 'path': full_dest_path} - move_result = client.files.manage( - systemId=src_system, - filePath=urllib.parse.quote(src_path), - body=body - ) - update_meta.apply_async(kwargs={ - "src_system": src_system, - "src_path": src_path, - "dest_system": dest_system, - "dest_path": full_dest_path - }, queue="indexing") - - if os.path.dirname(src_path) != full_dest_path or src_path != full_dest_path: - agave_indexer.apply_async(kwargs={ - 'systemId': src_system, - 'filePath': os.path.dirname(src_path), - 'recurse': False - }, queue='indexing') + #update_meta.apply_async(kwargs={ + # "src_system": src_system, + # "src_path": src_path, + # "dest_system": dest_system, + # "dest_path": dest_path_full + #}, queue="indexing") agave_indexer.apply_async(kwargs={ 'systemId': dest_system, @@ -271,14 +277,13 @@ def move(client, src_system, src_path, dest_system, dest_path): 'recurse': False }, queue='indexing') - if move_result['nativeFormat'] == 'dir': - agave_indexer.apply_async(kwargs={ - 'systemId': dest_system, - 'filePath': full_dest_path, - 'recurse': True - }, queue='indexing') + agave_indexer.apply_async(kwargs={ + 'systemId': dest_system, + 'filePath': dest_path_full, + 'recurse': True + }, queue='indexing') - return move_result + return {"result": "OK"} def copy(client, src_system, src_path, dest_system, dest_path): @@ -305,7 +310,7 @@ def copy(client, src_system, src_path, dest_system, dest_path): """ src_file_name = os.path.basename(src_path) try: - 
client.files.list(systemId=dest_system, filePath=os.path.join(dest_path, src_file_name)) + client.files.listFiles(systemId=dest_system, path=os.path.join(dest_path, src_file_name)) dst_file_name = rename_duplicate_path(src_file_name) full_dest_path = os.path.join(dest_path.strip('/'), dst_file_name) except: @@ -313,41 +318,46 @@ def copy(client, src_system, src_path, dest_system, dest_path): full_dest_path = os.path.join(dest_path.strip('/'), src_file_name) if src_system == dest_system: - body = {'action': 'copy', 'path': full_dest_path} - copy_result = client.files.manage( - systemId=src_system, - filePath=urllib.parse.quote(src_path.strip('/')), # don't think we need to strip '/' here... - body=body - ) + copy_result = client.files.moveCopy(systemId=src_system, + path=src_path, + operation="COPY", + newPath=full_dest_path) else: - src_url = 'agave://{}/{}'.format(src_system, urllib.parse.quote(src_path)) - copy_result = client.files.importData( - systemId=dest_system, - filePath=urllib.parse.quote(dest_path), - fileName=dst_file_name, - urlToIngest=src_url - ) - - copy_meta.apply_async(kwargs={ - "src_system": src_system, - "src_path": src_path, - "dest_system": dest_system, - "dest_path": full_dest_path + src_url = f'tapis://{src_system}/{src_path}' + dest_url = f'tapis://{dest_system}/{full_dest_path}' + + copy_response = client.files.createTransferTask(elements=[{ + 'sourceURI': src_url, + 'destinationURI': dest_url + }]) + copy_result = { + 'uuid': copy_response.uuid, + 'status': copy_response.status, + } + + + + #copy_meta.apply_async(kwargs={ + # "src_system": src_system, + # "src_path": src_path, + # "dest_system": dest_system, + # "dest_path": full_dest_path + #}, queue='indexing') + + copy_file_meta_async.delay(src_system, src_path, dest_system, full_dest_path) + + agave_indexer.apply_async(kwargs={ + 'systemId': dest_system, + 'filePath': full_dest_path, + 'recurse': True }, queue='indexing') - if copy_result['nativeFormat'] == 'dir': - 
agave_indexer.apply_async(kwargs={ - 'systemId': dest_system, - 'filePath': full_dest_path, - 'recurse': True - }, queue='indexing') - else: - agave_indexer.apply_async(kwargs={ - 'username': 'ds_admin', - 'systemId': dest_system, - 'filePath': dest_path, - 'recurse': False - }, queue='indexing') + agave_indexer.apply_async(kwargs={ + 'username': 'ds_admin', + 'systemId': dest_system, + 'filePath': dest_path, + 'recurse': False + }, queue='indexing') return dict(copy_result) @@ -381,44 +391,28 @@ def rename(client, system, path, new_name): # a directory... # listing[0].type == 'file' # listing[0].type == 'dir' - listing = client.files.list(systemId=system, filePath=path) path = path.strip('/') - body = {'action': 'rename', 'path': new_name} - - rename_result = client.files.manage( - systemId=system, - filePath=urllib.parse.quote(os.path.join('/', path)), - body=body - ) - update_meta.apply_async(kwargs={ - "src_system": system, - "src_path": path, - "dest_system": system, - "dest_path": os.path.join(os.path.dirname(path), new_name) - }, queue="indexing") - - # if rename_result['nativeFormat'] == 'dir': - if listing[0].type == 'dir': - agave_indexer.apply_async( - kwargs={ - 'systemId': system, - 'filePath': os.path.dirname(path), - 'recurse': False - }, queue='indexing') - agave_indexer.apply_async(kwargs={ - 'systemId': system, - 'filePath': rename_result['path'], - 'recurse': True - }, queue='indexing') - else: - agave_indexer.apply_async( - kwargs={ - 'systemId': system, - 'filePath': os.path.dirname(path), - 'recurse': False - }, queue='indexing') + new_path = str(Path(path).parent / new_name) + + client.files.moveCopy(systemId=system, + path=path, + operation="MOVE", + newPath=new_path) + + move_file_meta_async.delay(system, path, system, new_path) + + agave_indexer.apply_async(kwargs={'systemId': system, + 'filePath': os.path.dirname(path), + 'recurse': False}, + queue='indexing') + + agave_indexer.apply_async(kwargs={'systemId': system, + 'filePath': 
new_path, + 'recurse': True}, + queue='indexing') + + return {"result": "OK"} - return dict(rename_result) def trash(client, system, path, trash_path): @@ -443,12 +437,9 @@ def trash(client, system, path, trash_path): # Create a trash path if none exists try: - client.files.list(systemId=system, - filePath=trash_path) - except HTTPError as err: - if err.response.status_code != 404: - logger.error("Unexpected exception listing .trash path in {}".format(system)) - raise + client.files.listFiles(systemId=system, + path=trash_path) + except tapipy.errors.NotFoundError: mkdir(client, system, trash_root, trash_foldername) resp = move(client, system, path, system, trash_path) @@ -487,11 +478,10 @@ def upload(client, system, path, uploaded_file, webkit_relative_path=None, *args upload_name = os.path.basename(uploaded_file.name) - resp = client.files.importData(systemId=system, - filePath=urllib.parse.quote(path), - fileName=str(upload_name), - fileToUpload=uploaded_file) + dest_path = os.path.join(path.strip('/'), uploaded_file.name) + response_json = client.files.insert(systemId=system, path=dest_path, file=uploaded_file) + return {"result": "OK"} agave_indexer.apply_async(kwargs={'systemId': system, 'filePath': path, 'recurse': False}, @@ -536,24 +526,28 @@ def preview(client, system, path, href="", max_uses=3, lifetime=600, *args, **kw file_name = path.strip('/').split('/')[-1] file_ext = os.path.splitext(file_name)[1].lower() - href = client.files.list(systemId=system, filePath=path)[0]['_links']['self']['href'] + # href = client.files.list(systemId=system, filePath=path)[0]['_links']['self']['href'] - meta_result = query_file_meta(system, os.path.join('/', path)) - meta = meta_result[0] if len(meta_result) else {} - - args = { - 'url': urllib.parse.unquote(href), - 'maxUses': max_uses, - 'method': 'GET', - 'lifetime': lifetime, - 'noauth': False - } + # meta_result = query_file_meta(system, os.path.join('/', path)) + # meta = meta_result[0] if len(meta_result) else {} 
+ meta = {} + try: + meta = FileMetaModel.get_by_path_and_system(system, path).value + meta.pop("system", None) + meta.pop("path", None) + meta.pop("basePath", None) + meta = {k: json.dumps(meta[k]) for k in meta} + except FileMetaModel.DoesNotExist: + meta = {} - postit_result = client.postits.create(body=args) - url = postit_result['_links']['self']['href'] + postit_result = client.files.createPostIt(systemId=system, path=path, allowedUses=max_uses, validSeconds=lifetime) + url = postit_result.redeemUrl if file_ext in settings.SUPPORTED_TEXT_PREVIEW_EXTS: - file_type = 'text' + if file_ext == '.hazmapper': + file_type = 'hazmapper' + else: + file_type = 'text' elif file_ext in settings.SUPPORTED_IMAGE_PREVIEW_EXTS: file_type = 'image' elif file_ext in settings.SUPPORTED_OBJECT_PREVIEW_EXTS: @@ -590,7 +584,7 @@ def download_bytes(client, system, path): BytesIO object representing the downloaded file. """ file_name = os.path.basename(path) - resp = client.files.download(systemId=system, filePath=path) - result = io.BytesIO(resp.content) + resp = client.files.getContents(systemId=system, path=path) + result = io.BytesIO(resp) result.name = file_name return result diff --git a/designsafe/apps/api/datafiles/operations/transfer_operations.py b/designsafe/apps/api/datafiles/operations/transfer_operations.py index 47e727acd1..52b0f4e754 100644 --- a/designsafe/apps/api/datafiles/operations/transfer_operations.py +++ b/designsafe/apps/api/datafiles/operations/transfer_operations.py @@ -1,4 +1,4 @@ -from designsafe.apps.api.datafiles.operations import agave_operations +from designsafe.apps.api.datafiles.operations import tapis_operations from designsafe.apps.api.datafiles.operations import googledrive_operations from designsafe.apps.api.datafiles.operations import dropbox_operations from designsafe.apps.api.datafiles.operations import box_operations @@ -11,10 +11,10 @@ 'mkdir': googledrive_operations.mkdir }, 'agave': { - 'upload': agave_operations.upload, - 'download': 
agave_operations.download_bytes, - 'iterate_listing': agave_operations.iterate_listing, - 'mkdir': agave_operations.mkdir + 'upload': tapis_operations.upload, + 'download': tapis_operations.download_bytes, + 'iterate_listing': tapis_operations.iterate_listing, + 'mkdir': tapis_operations.mkdir }, 'dropbox': { 'upload': dropbox_operations.upload, diff --git a/designsafe/apps/api/datafiles/views.py b/designsafe/apps/api/datafiles/views.py index 594b5304e5..347151f064 100644 --- a/designsafe/apps/api/datafiles/views.py +++ b/designsafe/apps/api/datafiles/views.py @@ -23,9 +23,9 @@ def get_client(user, api): client_mappings = { - 'agave': 'agave_oauth', - 'tapis': 'agave_oauth', - 'shared': 'agave_oauth', + 'agave': 'tapis_oauth', + 'tapis': 'tapis_oauth', + 'shared': 'tapis_oauth', 'googledrive': 'googledrive_user_token', 'box': 'box_user_token', 'dropbox': 'dropbox_user_token' diff --git a/designsafe/apps/api/decorators.py b/designsafe/apps/api/decorators.py index f931f421e0..269453cecb 100644 --- a/designsafe/apps/api/decorators.py +++ b/designsafe/apps/api/decorators.py @@ -5,6 +5,7 @@ from functools import wraps from base64 import b64decode from django.conf import settings +from django.http import HttpRequest from django.contrib.auth import get_user_model from django.contrib.auth import login from django.core.exceptions import ObjectDoesNotExist @@ -13,6 +14,8 @@ from cryptography.hazmat.primitives.asymmetric import rsa from cryptography.hazmat.primitives.serialization import load_der_public_key from cryptography.exceptions import UnsupportedAlgorithm +from tapipy.tapis import Tapis +from tapipy.errors import BaseTapyException #pylint: disable=invalid-name logger = logging.getLogger(__name__) @@ -57,6 +60,50 @@ def _get_jwt_payload(request): return payload + +def tapis_jwt_login(func): + """Decorator to log in a user with their Tapis OAuth token + + ..note:: + It will silently fail and continue executing the wrapped function + if the JWT payload header IS NOT 
present in the request. If the JWT payload + header IS present then it will continue executing the wrapped function passing + the request object with the correct user logged-in. + """ + #pylint: disable=missing-docstring + @wraps(func) + def decorated_function(request: HttpRequest, *args, **kwargs): + if request.user.is_authenticated: + return func(request, *args, **kwargs) + + tapis_jwt = request.headers.get('X-Tapis-Token') + if not tapis_jwt: + logger.debug('No JWT payload found. Falling back') + return func(request, *args, **kwargs) + + tapis_client = Tapis(base_url=settings.TAPIS_TENANT_BASEURL) + try: + validation_response = tapis_client.validate_token(tapis_jwt) + except BaseTapyException: + return func(request, *args, **kwargs) + + tapis_username = validation_response['tapis/username'] + + try: + user = get_user_model().objects.get(username=tapis_username) + except ObjectDoesNotExist: + logger.exception('Could not find JWT user: %s', tapis_username) + user = None + + if user is not None: + login(request, user, backend="django.contrib.auth.backends.ModelBackend") + + return func(request, *args, **kwargs) + + return decorated_function + #pylint: enable=missing-docstring + + def agave_jwt_login(func): """Decorator to login user with a jwt diff --git a/designsafe/apps/api/filemeta/management/commands/populate_filemeta_table_from_tavpisv2_metadata.py b/designsafe/apps/api/filemeta/management/commands/populate_filemeta_table_from_tavpisv2_metadata.py index 4907a36f58..c1b23a9dff 100644 --- a/designsafe/apps/api/filemeta/management/commands/populate_filemeta_table_from_tavpisv2_metadata.py +++ b/designsafe/apps/api/filemeta/management/commands/populate_filemeta_table_from_tavpisv2_metadata.py @@ -5,7 +5,7 @@ # pylint: disable=logging-fstring-interpolation # pylint: disable=no-member - +import os import logging import json @@ -83,6 +83,10 @@ def populate_filemeta_table(dry_run, do_not_update_existing): continue if not dry_run: + meta_data["value"]["path"] = ( + 
f"/{meta_data['value']['path'].lstrip('/')}".replace("//", "/") + ) + meta_data["value"]["basePath"] = os.path.dirname(meta_data["value"]["path"]) FileMetaModel.create_or_update_file_meta(meta_data["value"]) updated += 1 diff --git a/designsafe/apps/api/filemeta/models.py b/designsafe/apps/api/filemeta/models.py index 1f8992bea4..2be50c3d13 100644 --- a/designsafe/apps/api/filemeta/models.py +++ b/designsafe/apps/api/filemeta/models.py @@ -5,13 +5,14 @@ from django.utils import timezone -def _get_normalized_path(path) -> str: +def _get_normalized_path(path: str) -> str: """ Return a file path that begins with /" For example, "file.jpg" becomes "/file.jpg" """ if not path.startswith('/'): path = '/' + path + path = path.replace('//', '/') return path diff --git a/designsafe/apps/api/filemeta/tasks.py b/designsafe/apps/api/filemeta/tasks.py new file mode 100644 index 0000000000..209a4cd6be --- /dev/null +++ b/designsafe/apps/api/filemeta/tasks.py @@ -0,0 +1,70 @@ +"""Utils for bulk move/copy of file metadata objects.""" + +import os +from celery import shared_task +from designsafe.apps.api.filemeta.models import FileMetaModel + + +def copy_file_meta(src_system: str, src_path: str, dest_system: str, dest_path: str): + """Create new copies of file metadata when files are copied to a new system/path.""" + clean_src_path = f"/{src_path.lstrip('/').replace('//', '/')}" + clean_dest_path = f"/{dest_path.lstrip('/').replace('//', '/')}" + meta_objs_to_create = ( + FileMetaModel( + value={ + **meta_obj.value, + "system": dest_system, + "path": meta_obj.value["path"].replace(clean_src_path, clean_dest_path), + "basePath": os.path.dirname( + meta_obj.value["path"].replace(clean_src_path, clean_dest_path) + ), + } + ) + for meta_obj in FileMetaModel.objects.filter( + value__system=src_system, + value__path__startswith=f"/{src_path.lstrip('/').replace('//', '/')}", + ) + ) + # return meta_objs_to_create + FileMetaModel.objects.bulk_create(meta_objs_to_create) + + +@shared_task 
+def copy_file_meta_async( + src_system: str, src_path: str, dest_system: str, dest_path: str +): + """async wrapper around copy_file_meta""" + copy_file_meta(src_system, src_path, dest_system, dest_path) + + +def move_file_meta(src_system: str, src_path: str, dest_system: str, dest_path: str): + """Update system and path of metadata objects to reflect movement to a new path.""" + clean_src_path = f"/{src_path.lstrip('/').replace('//', '/')}" + clean_dest_path = f"/{dest_path.lstrip('/').replace('//', '/')}" + + meta_to_update = list( + FileMetaModel.objects.filter( + value__system=src_system, + value__path__startswith=f"/{src_path.lstrip('/').replace('//', '/')}", + ) + ) + + for meta_obj in meta_to_update: + meta_obj.value = { + **meta_obj.value, + "system": dest_system, + "path": meta_obj.value["path"].replace(clean_src_path, clean_dest_path), + "basePath": os.path.dirname( + meta_obj.value["path"].replace(clean_src_path, clean_dest_path) + ), + } + + FileMetaModel.objects.bulk_update(meta_to_update, ["value"]) + + +@shared_task +def move_file_meta_async( + src_system: str, src_path: str, dest_system: str, dest_path: str +): + """Async wrapper around move_file_meta""" + move_file_meta(src_system, src_path, dest_system, dest_path) diff --git a/designsafe/apps/api/filemeta/tests.py b/designsafe/apps/api/filemeta/tests.py index 3ab665b7bc..cba2d9d0e5 100644 --- a/designsafe/apps/api/filemeta/tests.py +++ b/designsafe/apps/api/filemeta/tests.py @@ -85,6 +85,23 @@ def test_get_file_meta( } +@pytest.mark.django_db +def test_get_file_meta_using_jwt( + regular_user_using_jwt, client, filemeta_db_mock, mock_access_success +): + system_id, path, file_meta = filemeta_db_mock + response = client.get(f"/api/filemeta/{system_id}/{path}") + assert response.status_code == 200 + + assert response.json() == { + "value": file_meta.value, + "name": "designsafe.file", + "lastUpdated": file_meta.last_updated.isoformat( + timespec="milliseconds" + ).replace("+00:00", "Z"), + } + + 
@pytest.mark.django_db def test_create_file_meta_no_access( client, authenticated_user, filemeta_value_mock, mock_access_failure @@ -122,6 +139,21 @@ def test_create_file_meta( assert file_meta.value == filemeta_value_mock +@pytest.mark.django_db +def test_create_file_meta_using_jwt( + client, regular_user_using_jwt, filemeta_value_mock, mock_access_success +): + response = client.post( + "/api/filemeta/", + data=json.dumps(filemeta_value_mock), + content_type="application/json", + ) + assert response.status_code == 200 + + file_meta = FileMetaModel.objects.first() + assert file_meta.value == filemeta_value_mock + + @pytest.mark.django_db def test_create_file_meta_update_existing_entry( client, @@ -147,8 +179,6 @@ def test_create_file_meta_update_existing_entry( def test_create_file_metadata_missing_system_or_path( client, authenticated_user, - filemeta_db_mock, - filemeta_value_mock, mock_access_success, ): value_missing_system_path = {"foo": "bar"} diff --git a/designsafe/apps/api/filemeta/views.py b/designsafe/apps/api/filemeta/views.py index d15dd49dd3..014cccabc3 100644 --- a/designsafe/apps/api/filemeta/views.py +++ b/designsafe/apps/api/filemeta/views.py @@ -1,11 +1,12 @@ """File Meta view""" + import logging import json from django.http import JsonResponse, HttpRequest -from designsafe.apps.api.datafiles.operations.agave_operations import listing +from designsafe.apps.api.datafiles.operations.tapis_operations import listing from designsafe.apps.api.exceptions import ApiException from designsafe.apps.api.filemeta.models import FileMetaModel -from designsafe.apps.api.views import AuthenticatedApiView +from designsafe.apps.api.views import AuthenticatedAllowJwtApiView logger = logging.getLogger(__name__) @@ -28,8 +29,7 @@ def check_access(request, system_id: str, path: str, check_for_writable_access=F raise ApiException(error_msg, status=403) try: - # TODO_V3 update to use renamed (i.e. 
"tapis") client - listing(request.user.agave_oauth.client, system_id, path) + listing(request.user.tapis_oauth.client, system_id, path) except Exception as exc: # pylint:disable=broad-exception-caught logger.error( f"user cannot access any related metadata as listing failed for {system_id}/{path} with error {str(exc)}." @@ -37,8 +37,7 @@ def check_access(request, system_id: str, path: str, check_for_writable_access=F raise ApiException("User forbidden to access metadata", status=403) from exc -# TODO_V3 update to allow JWT access DES-2706: https://github.com/DesignSafe-CI/portal/pull/1192 -class FileMetaView(AuthenticatedApiView): +class FileMetaView(AuthenticatedAllowJwtApiView): """View for creating and getting file metadata""" def get(self, request: HttpRequest, system_id: str, path: str): @@ -65,8 +64,7 @@ def get(self, request: HttpRequest, system_id: str, path: str): return JsonResponse(result, safe=False) -# TODO_V3 update to allow JWT access DES-2706: https://github.com/DesignSafe-CI/portal/pull/1192 -class CreateFileMetaView(AuthenticatedApiView): +class CreateFileMetaView(AuthenticatedAllowJwtApiView): """View for creating (and updating) file metadata""" def post(self, request: HttpRequest): @@ -80,7 +78,9 @@ def post(self, request: HttpRequest): raise ApiException("System and path are required in payload", status=400) system_id = value["system"] - path = value["path"] + raw_path = value["path"] + # Normalize raw path to ensure leading slash and remove duplicate slashes. 
+ path = f"/{raw_path.lstrip('/')}".replace("//", "/") check_access(request, system_id, path, check_for_writable_access=True) diff --git a/designsafe/apps/api/fixtures/agave-oauth-token-data.json b/designsafe/apps/api/fixtures/agave-oauth-token-data.json deleted file mode 100644 index c5204eb8a8..0000000000 --- a/designsafe/apps/api/fixtures/agave-oauth-token-data.json +++ /dev/null @@ -1,28 +0,0 @@ -[ -{ - "fields": { - "created": 1461727485, - "access_token": "dc48198091d73c8933c2c0ee96afb01b", - "expires_in": 14400, - "token_type": "bearer", - "user": 1, - "scope": "default", - "refresh_token": "2f715c8eb6962a883c7cd29af7d1165" - }, - "model": "designsafe_auth.agaveoauthtoken", - "pk": 1 -}, -{ - "fields": { - "created": 1463178660, - "access_token": "7834a55e92f3f9b86dc1627bff8d43", - "expires_in": 14400, - "token_type": "bearer", - "user": 2, - "scope": "default", - "refresh_token": "dc1c5b9a5124f88147c783e35b5ca9c" - }, - "model": "designsafe_auth.agaveoauthtoken", - "pk": 2 -} -] diff --git a/designsafe/apps/api/fixtures/user-data.json b/designsafe/apps/api/fixtures/user-data.json deleted file mode 100644 index 42379513c1..0000000000 --- a/designsafe/apps/api/fixtures/user-data.json +++ /dev/null @@ -1,69 +0,0 @@ -[ - { - "fields": { - "username": "ds_admin", - "first_name": "DesignSafe", - "last_name": "Admin", - "is_active": true, - "is_superuser": true, - "is_staff": true, - "last_login": "2016-03-01T00:00:00.000Z", - "groups": [], - "user_permissions": [], - "password": "", - "email": "admin@designsafe-ci.org", - "date_joined": "2016-03-01T00:00:00.000Z" - }, - "model": "auth.user", - "pk": 1 - }, - { - "fields": { - "username": "envision", - "first_name": "DesignSafe", - "last_name": "Admin", - "is_active": true, - "is_superuser": true, - "is_staff": true, - "last_login": "2016-03-01T00:00:00.000Z", - "groups": [], - "user_permissions": [], - "password": "", - "email": "admin@designsafe-ci.org", - "date_joined": "2016-03-01T00:00:00.000Z" - }, - 
"model": "auth.user", - "pk": 3 - }, - { - "fields": { - "username": "ds_user", - "first_name": "DesignSafe", - "last_name": "User", - "is_active": true, - "is_superuser": false, - "is_staff": false, - "last_login": "2016-03-01T00:00:00.000Z", - "groups": [], - "user_permissions": [], - "password": "", - "email": "user@designsafe-ci.org", - "date_joined": "2016-03-01T00:00:00.000Z" - }, - "model": "auth.user", - "pk": 2 - }, - { - "fields": { - "user_id": "2", - "token_type": "Bearer", - "scope": "PRODUCTION", - "access_token": "fakeaccesstoken", - "refresh_token": "fakerefreshtoken", - "expires_in": "14400", - "created": "1459433273" - }, - "model": "designsafe_auth.agaveoauthtoken", - "pk": 1 - } -] diff --git a/designsafe/apps/api/licenses/views.py b/designsafe/apps/api/licenses/views.py index 9210e9ca09..8904b201f1 100644 --- a/designsafe/apps/api/licenses/views.py +++ b/designsafe/apps/api/licenses/views.py @@ -1,31 +1,32 @@ -from designsafe.apps.api.views import BaseApiView -from designsafe.apps.api.mixins import SecureMixin -from designsafe.libs.common.decorators import profile as profile_fn +"""Views for the licenses API.""" + from django.contrib.auth import get_user_model from django.http.response import HttpResponseForbidden, HttpResponseNotFound from django.http import JsonResponse from django.apps import apps -from designsafe.apps.data.models.agave.util import AgaveJSONEncoder +from designsafe.apps.api.views import AuthenticatedAllowJwtApiView import logging logger = logging.getLogger(__name__) -class LicenseView(SecureMixin, BaseApiView): - @profile_fn +class LicenseView(AuthenticatedAllowJwtApiView): + """View for retrieving licenses for a specific app.""" + def get(self, request, app_name): + """Return the license for the given app.""" if not request.user.is_staff: return HttpResponseForbidden() try: - app_license = apps.get_model('designsafe_licenses', '{}License'.format(app_name)) + app_license = apps.get_model("designsafe_licenses", 
f"{app_name}License") except LookupError: return HttpResponseNotFound() - username = request.GET.get('username', None) + username = request.GET.get("username", None) if not username: return HttpResponseNotFound() user = get_user_model().objects.get(username=username) licenses = app_license.objects.filter(user=user) - user_license = licenses[0].license_as_str() if len(licenses) > 0 else '' - return JsonResponse({'license': user_license}, encoder=AgaveJSONEncoder) + user_license = licenses[0].license_as_str() if len(licenses) > 0 else "" + return JsonResponse({"license": user_license}) diff --git a/designsafe/apps/api/notifications/fixtures/agave-oauth-token-data.json b/designsafe/apps/api/notifications/fixtures/agave-oauth-token-data.json deleted file mode 100644 index c5204eb8a8..0000000000 --- a/designsafe/apps/api/notifications/fixtures/agave-oauth-token-data.json +++ /dev/null @@ -1,28 +0,0 @@ -[ -{ - "fields": { - "created": 1461727485, - "access_token": "dc48198091d73c8933c2c0ee96afb01b", - "expires_in": 14400, - "token_type": "bearer", - "user": 1, - "scope": "default", - "refresh_token": "2f715c8eb6962a883c7cd29af7d1165" - }, - "model": "designsafe_auth.agaveoauthtoken", - "pk": 1 -}, -{ - "fields": { - "created": 1463178660, - "access_token": "7834a55e92f3f9b86dc1627bff8d43", - "expires_in": 14400, - "token_type": "bearer", - "user": 2, - "scope": "default", - "refresh_token": "dc1c5b9a5124f88147c783e35b5ca9c" - }, - "model": "designsafe_auth.agaveoauthtoken", - "pk": 2 -} -] diff --git a/designsafe/apps/api/notifications/fixtures/user-data.json b/designsafe/apps/api/notifications/fixtures/user-data.json deleted file mode 100644 index 42379513c1..0000000000 --- a/designsafe/apps/api/notifications/fixtures/user-data.json +++ /dev/null @@ -1,69 +0,0 @@ -[ - { - "fields": { - "username": "ds_admin", - "first_name": "DesignSafe", - "last_name": "Admin", - "is_active": true, - "is_superuser": true, - "is_staff": true, - "last_login": "2016-03-01T00:00:00.000Z", 
- "groups": [], - "user_permissions": [], - "password": "", - "email": "admin@designsafe-ci.org", - "date_joined": "2016-03-01T00:00:00.000Z" - }, - "model": "auth.user", - "pk": 1 - }, - { - "fields": { - "username": "envision", - "first_name": "DesignSafe", - "last_name": "Admin", - "is_active": true, - "is_superuser": true, - "is_staff": true, - "last_login": "2016-03-01T00:00:00.000Z", - "groups": [], - "user_permissions": [], - "password": "", - "email": "admin@designsafe-ci.org", - "date_joined": "2016-03-01T00:00:00.000Z" - }, - "model": "auth.user", - "pk": 3 - }, - { - "fields": { - "username": "ds_user", - "first_name": "DesignSafe", - "last_name": "User", - "is_active": true, - "is_superuser": false, - "is_staff": false, - "last_login": "2016-03-01T00:00:00.000Z", - "groups": [], - "user_permissions": [], - "password": "", - "email": "user@designsafe-ci.org", - "date_joined": "2016-03-01T00:00:00.000Z" - }, - "model": "auth.user", - "pk": 2 - }, - { - "fields": { - "user_id": "2", - "token_type": "Bearer", - "scope": "PRODUCTION", - "access_token": "fakeaccesstoken", - "refresh_token": "fakerefreshtoken", - "expires_in": "14400", - "created": "1459433273" - }, - "model": "designsafe_auth.agaveoauthtoken", - "pk": 1 - } -] diff --git a/designsafe/apps/api/notifications/tests.py b/designsafe/apps/api/notifications/tests.py index cf93dfc8fc..b0faf72ef8 100644 --- a/designsafe/apps/api/notifications/tests.py +++ b/designsafe/apps/api/notifications/tests.py @@ -1,15 +1,12 @@ -import requests import json import os from django.test import TestCase from django.test import Client from django.contrib.auth import get_user_model from django.db.models.signals import post_save -from mock import Mock, patch -from designsafe.apps.auth.models import AgaveOAuthToken +from mock import patch from urllib.parse import urlencode from unittest import skip -from django.dispatch import receiver from django.urls import reverse from designsafe.apps.api.notifications.models import 
Notification from .receivers import send_notification_ws @@ -19,42 +16,41 @@ logger = logging.getLogger(__name__) -FILEDIR_PENDING = os.path.join(os.path.dirname(__file__), './json/pending.json') -FILEDIR_SUBMITTING = os.path.join(os.path.dirname(__file__), './json/submitting.json') -FILEDIR_PENDING2 = os.path.join(os.path.dirname(__file__), './json/pending2.json') +FILEDIR_PENDING = os.path.join(os.path.dirname(__file__), "./json/pending.json") +FILEDIR_SUBMITTING = os.path.join(os.path.dirname(__file__), "./json/submitting.json") +FILEDIR_PENDING2 = os.path.join(os.path.dirname(__file__), "./json/pending2.json") webhook_body_pending = json.dumps(json.load(open(FILEDIR_PENDING))) webhook_body_pending2 = json.dumps(json.load(open(FILEDIR_PENDING2))) webhook_body_submitting = json.dumps(json.load(open(FILEDIR_SUBMITTING))) - # Create your tests here. @skip("Need to mock websocket call to redis") class NotificationsTestCase(TestCase): - fixtures = ['user-data.json', 'agave-oauth-token-data.json'] + fixtures = ["user-data.json", "auth.json"] def setUp(self): - self.wh_url = reverse('designsafe_api:jobs_wh_handler') + self.wh_url = reverse("designsafe_api:jobs_wh_handler") user = get_user_model().objects.get(pk=2) - user.set_password('password') + user.set_password("password") user.save() self.user = user self.client = Client() - with open('designsafe/apps/api/fixtures/agave-model-config-meta.json') as f: + with open("designsafe/apps/api/fixtures/agave-model-config-meta.json") as f: model_config_meta = json.load(f) self.model_config_meta = model_config_meta - with open('designsafe/apps/api/fixtures/agave-file-meta.json') as f: + with open("designsafe/apps/api/fixtures/agave-file-meta.json") as f: file_meta = json.load(f) self.file_meta = file_meta - with open('designsafe/apps/api/fixtures/agave-experiment-meta.json') as f: + with open("designsafe/apps/api/fixtures/agave-experiment-meta.json") as f: experiment_meta = json.load(f) self.experiment_meta = experiment_meta - 
with open('designsafe/apps/api/fixtures/agave-project-meta.json') as f: + with open("designsafe/apps/api/fixtures/agave-project-meta.json") as f: project_meta = json.load(f) self.project_meta = project_meta @@ -62,42 +58,59 @@ def test_current_user_is_ds_user(self): """ just making sure the db setup worked. """ - self.assertEqual(self.user.username, 'ds_user') + self.assertEqual(self.user.username, "ds_user") def test_submitting_webhook_returns_200_and_creates_notification(self): - r = self.client.post(self.wh_url, webhook_body_pending, content_type='application/json') + r = self.client.post( + self.wh_url, webhook_body_pending, content_type="application/json" + ) self.assertEqual(r.status_code, 200) n = Notification.objects.last() - status_from_notification = n.to_dict()['extra']['status'] - self.assertEqual(status_from_notification, 'PENDING') + status_from_notification = n.to_dict()["extra"]["status"] + self.assertEqual(status_from_notification, "PENDING") def test_2_webhooks_same_status_same_jobId_should_give_1_notification(self): - r = self.client.post(self.wh_url, webhook_body_pending, content_type='application/json') - - #assert that sending the same status twice doesn't trigger a second notification. - r2 = self.client.post(self.wh_url, webhook_body_pending, content_type='application/json') + r = self.client.post( + self.wh_url, webhook_body_pending, content_type="application/json" + ) + + # assert that sending the same status twice doesn't trigger a second notification. 
+ r2 = self.client.post( + self.wh_url, webhook_body_pending, content_type="application/json" + ) self.assertEqual(Notification.objects.count(), 1) def test_2_webhooks_different_status_same_jobId_should_give_2_notifications(self): - r1 = self.client.post(self.wh_url, webhook_body_pending, content_type='application/json') + r1 = self.client.post( + self.wh_url, webhook_body_pending, content_type="application/json" + ) - r2 = self.client.post(self.wh_url, webhook_body_submitting, content_type='application/json') + r2 = self.client.post( + self.wh_url, webhook_body_submitting, content_type="application/json" + ) self.assertEqual(Notification.objects.count(), 2) def test_2_webhooks_same_status_different_jobId_should_give_2_notifications(self): - r = self.client.post(self.wh_url, webhook_body_pending, content_type='application/json') - r2 = self.client.post(self.wh_url, webhook_body_pending2, content_type='application/json') + r = self.client.post( + self.wh_url, webhook_body_pending, content_type="application/json" + ) + r2 = self.client.post( + self.wh_url, webhook_body_pending2, content_type="application/json" + ) self.assertEqual(Notification.objects.count(), 2) +@skip("TODOv3: Update webhooks with Tapisv3") class TestWebhookViews(TestCase): - fixtures = ['user-data', 'agave-oauth-token-data'] + fixtures = ["user-data", "auth"] def setUp(self): - self.wh_url = reverse('designsafe_api:jobs_wh_handler') - self.mock_agave_patcher = patch('designsafe.apps.auth.models.AgaveOAuthToken.client', autospec=True) + self.wh_url = reverse("designsafe_api:jobs_wh_handler") + self.mock_agave_patcher = patch( + "designsafe.apps.auth.models.TapisOAuthToken.client", autospec=True + ) self.mock_agave = self.mock_agave_patcher.start() self.client.force_login(get_user_model().objects.get(username="ds_user")) @@ -109,7 +122,7 @@ def setUp(self): "port": "1234", "address": "http://designsafe-exec-01.tacc.utexas.edu:1234", "job_uuid": "3373312947011719656-242ac11b-0001-007", - "owner": 
"ds_user" + "owner": "ds_user", } self.vnc_event = { @@ -117,7 +130,7 @@ def setUp(self): "host": "stampede2.tacc.utexas.edu", "port": "2234", "password": "3373312947011719656-242ac11b-0001-007", - "owner": "ds_user" + "owner": "ds_user", } self.agave_job_running = {"owner": "ds_user", "status": "RUNNING"} @@ -125,62 +138,80 @@ def setUp(self): def tearDown(self): self.mock_agave_patcher.stop() - post_save.connect(send_notification_ws, sender=Notification, dispatch_uid="notification_msg") + post_save.connect( + send_notification_ws, sender=Notification, dispatch_uid="notification_msg" + ) def test_unsupported_event_type(self): - response = self.client.post(reverse('interactive_wh_handler'), - urlencode({'event_type': 'DUMMY'}), - content_type='application/x-www-form-urlencoded') + response = self.client.post( + reverse("interactive_wh_handler"), + urlencode({"event_type": "DUMMY"}), + content_type="application/x-www-form-urlencoded", + ) self.assertTrue(response.status_code == 400) def test_webhook_job_post(self): - job_event = json.load(open(os.path.join(os.path.dirname(__file__), 'json/submitting.json'))) + job_event = json.load( + open(os.path.join(os.path.dirname(__file__), "json/submitting.json")) + ) - response = self.client.post(self.wh_url, json.dumps(job_event), content_type='application/json') + response = self.client.post( + self.wh_url, json.dumps(job_event), content_type="application/json" + ) self.assertEqual(response.status_code, 200) n = Notification.objects.last() - n_status = n.to_dict()['extra']['status'] - self.assertEqual(n_status, job_event['status']) + n_status = n.to_dict()["extra"]["status"] + self.assertEqual(n_status, job_event["status"]) def test_webhook_vnc_post(self): self.mock_agave.jobs.get.return_value = self.agave_job_running link_from_event = "https://tap.tacc.utexas.edu/noVNC/?host=stampede2.tacc.utexas.edu&port=2234&autoconnect=true&encrypt=true&resize=scale&password=3373312947011719656-242ac11b-0001-007" - response = 
self.client.post(reverse('interactive_wh_handler'), urlencode(self.vnc_event), content_type='application/x-www-form-urlencoded') + response = self.client.post( + reverse("interactive_wh_handler"), + urlencode(self.vnc_event), + content_type="application/x-www-form-urlencoded", + ) self.assertEqual(response.status_code, 200) self.assertTrue(self.mock_agave.meta.addMetadata.called) self.assertEqual(Notification.objects.count(), 1) n = Notification.objects.last() - action_link = n.to_dict()['action_link'] + action_link = n.to_dict()["action_link"] self.assertEqual(action_link, link_from_event) - self.assertEqual(n.operation, 'web_link') + self.assertEqual(n.operation, "web_link") def test_webhook_web_post(self): self.mock_agave.jobs.get.return_value = self.agave_job_running link_from_event = "http://designsafe-exec-01.tacc.utexas.edu:1234" - response = self.client.post(reverse('interactive_wh_handler'), urlencode(self.web_event), content_type='application/x-www-form-urlencoded') + response = self.client.post( + reverse("interactive_wh_handler"), + urlencode(self.web_event), + content_type="application/x-www-form-urlencoded", + ) self.assertEqual(response.status_code, 200) self.assertTrue(self.mock_agave.meta.addMetadata.called) self.assertEqual(Notification.objects.count(), 1) n = Notification.objects.last() - action_link = n.to_dict()['action_link'] + action_link = n.to_dict()["action_link"] self.assertEqual(action_link, link_from_event) - self.assertEqual(n.operation, 'web_link') + self.assertEqual(n.operation, "web_link") def test_webhook_vnc_post_no_matching_job(self): self.mock_agave.jobs.get.return_value = self.agave_job_failed - response = self.client.post(reverse('interactive_wh_handler'), - urlencode(self.vnc_event), - content_type='application/x-www-form-urlencoded') + response = self.client.post( + reverse("interactive_wh_handler"), + urlencode(self.vnc_event), + content_type="application/x-www-form-urlencoded", + ) # no matching running job so it fails 
self.assertEqual(response.status_code, 400) self.assertEqual(Notification.objects.count(), 0) @@ -188,9 +219,11 @@ def test_webhook_vnc_post_no_matching_job(self): def test_webhook_web_post_no_matching_job(self): self.mock_agave.jobs.get.return_value = self.agave_job_failed - response = self.client.post(reverse('interactive_wh_handler'), - urlencode(self.web_event), - content_type='application/x-www-form-urlencoded') + response = self.client.post( + reverse("interactive_wh_handler"), + urlencode(self.web_event), + content_type="application/x-www-form-urlencoded", + ) # no matching running job so it fails self.assertEqual(response.status_code, 400) self.assertEqual(Notification.objects.count(), 0) diff --git a/designsafe/apps/api/notifications/urls.py b/designsafe/apps/api/notifications/urls.py index 6bdd3f0de6..6faab4b567 100644 --- a/designsafe/apps/api/notifications/urls.py +++ b/designsafe/apps/api/notifications/urls.py @@ -1,14 +1,10 @@ from django.urls import re_path as url from designsafe.apps.api.notifications.views.api import ManageNotificationsView, NotificationsBadgeView -from designsafe.apps.api.notifications.views.webhooks import JobsWebhookView, FilesWebhookView urlpatterns = [ url(r'^$', ManageNotificationsView.as_view(), name='index'), url(r'^badge/$', NotificationsBadgeView.as_view(), name='badge'), - url(r'^notifications/(?P\w+)/?$', ManageNotificationsView.as_view(), name='event_type_notifications'), url(r'^delete/(?P\w+)?$', ManageNotificationsView.as_view(), name='delete_notification'), - url(r'^wh/jobs/$', JobsWebhookView.as_view(), name='jobs_wh_handler'), - url(r'^wh/files/$', FilesWebhookView.as_view(), name='files_wh_handler'), ] diff --git a/designsafe/apps/api/notifications/views/api.py b/designsafe/apps/api/notifications/views/api.py index 85546e0bf9..7e569938ab 100644 --- a/designsafe/apps/api/notifications/views/api.py +++ b/designsafe/apps/api/notifications/views/api.py @@ -1,60 +1,101 @@ import logging -from django.http.response 
import HttpResponseBadRequest -from django.http import HttpResponse -from django.urls import reverse -from django.shortcuts import render - +import json +from django.http import HttpResponse, JsonResponse from designsafe.apps.api.notifications.models import Notification - from designsafe.apps.api.views import BaseApiView from designsafe.apps.api.mixins import JSONResponseMixin, SecureMixin -from designsafe.apps.api.exceptions import ApiException -import json logger = logging.getLogger(__name__) class ManageNotificationsView(SecureMixin, JSONResponseMixin, BaseApiView): - def get(self, request, event_type = None, *args, **kwargs): - limit = request.GET.get('limit', 0) - page = request.GET.get('page', 0) - - if event_type is not None: - notifs = Notification.objects.filter(event_type = event_type, - deleted = False, - user = request.user.username).order_by('-datetime') - total = Notification.objects.filter(event_type = event_type, - deleted = False, - user = request.user.username).count() + def get(self, request, *args, **kwargs): + """List all notifications of a certain event type.""" + limit = request.GET.get("limit", 0) + page = request.GET.get("page", 0) + read = request.GET.get("read") + event_types = request.GET.getlist("eventTypes[]") + mark_read = request.GET.get( + "markRead", True + ) # mark read by default to support legacy behavior + + query_params = {} + if read is not None: + query_params["read"] = json.loads(read) + + if event_types: + notifs = Notification.objects.filter( + event_type__in=event_types, + deleted=False, + user=request.user.username, + **query_params + ).order_by("-datetime") + total = Notification.objects.filter( + event_type__in=event_types, deleted=False, user=request.user.username + ).count() + unread = Notification.objects.filter( + event_type__in=event_types, + deleted=False, + read=False, + user=request.user.username, + ).count() else: - notifs = Notification.objects.filter(deleted = False, - user = 
request.user.username).order_by('-datetime') - total = Notification.objects.filter(deleted = False, - user = request.user.username).count() + notifs = Notification.objects.filter( + deleted=False, user=request.user.username, **query_params + ).order_by("-datetime") + total = Notification.objects.filter( + deleted=False, user=request.user.username + ).count() + unread = Notification.objects.filter( + deleted=False, read=False, user=request.user.username + ).count() if limit: limit = int(limit) page = int(page) offset = page * limit - notifs = notifs[offset:offset+limit] + notifs = notifs[offset : offset + limit] - for n in notifs: - if not n.read: - n.mark_read() + if mark_read: + for n in notifs: + if not n.read: + n.mark_read() notifs = [n.to_dict() for n in notifs] - return self.render_to_json_response({'notifs':notifs, 'page':page, 'total': total}) - # return self.render_to_json_response(notifs) + return JsonResponse( + {"notifs": notifs, "page": page, "total": total, "unread": unread} + ) def post(self, request, *args, **kwargs): body_json = json.loads(request.body) - nid = body_json['id'] - read = body_json['read'] - n = Notification.get(id = nid) + nid = body_json["id"] + read = body_json["read"] + n = Notification.get(id=nid) n.read = read n.save() + def patch(self, request, *args, **kwargs): + """Mark notifications as read.""" + body = json.loads(request.body) + event_types = body.get("eventTypes") + + if event_types is not None: + notifs = Notification.objects.filter( + deleted=False, + read=False, + event_type__in=event_types, + user=request.user.username, + ) + else: + notifs = Notification.objects.filter( + deleted=False, read=False, user=request.user.username + ) + for n in notifs: + n.mark_read() + + return JsonResponse({"message": "OK"}) + def delete(self, request, pk, *args, **kwargs): # body_json = json.loads(request.body) # nid = body_json['id'] @@ -62,19 +103,34 @@ def delete(self, request, pk, *args, **kwargs): # n = Notification.objects.get(pk 
= pk) # n.deleted = deleted # n.save() - if pk == 'all': - items=Notification.objects.filter(deleted=False, user=str(request.user)) + if pk == "all": + items = Notification.objects.filter(deleted=False, user=str(request.user)) for i in items: i.mark_deleted() else: x = Notification.objects.get(pk=pk) x.mark_deleted() - return HttpResponse('OK') + return HttpResponse("OK") + class NotificationsBadgeView(SecureMixin, JSONResponseMixin, BaseApiView): + """View for notifications badge count""" def get(self, request, *args, **kwargs): - unread = Notification.objects.filter(deleted = False, read = False, - user = request.user.username).count() - return self.render_to_json_response({'unread': unread}) + """Get count of unread notifications of a certain event type.""" + event_types = request.GET.getlist("eventTypes[]") + + if event_types: + unread = Notification.objects.filter( + event_type__in=event_types, + deleted=False, + read=False, + user=request.user.username, + ).count() + else: + unread = Notification.objects.filter( + deleted=False, read=False, user=request.user.username + ).count() + + return JsonResponse({"unread": unread}) diff --git a/designsafe/apps/api/notifications/views/webhooks.py b/designsafe/apps/api/notifications/views/webhooks.py deleted file mode 100644 index 3dd9470757..0000000000 --- a/designsafe/apps/api/notifications/views/webhooks.py +++ /dev/null @@ -1,67 +0,0 @@ -from django.http.response import HttpResponseBadRequest -from django.core.exceptions import ObjectDoesNotExist -from django.contrib.auth import get_user_model -from django.urls import reverse -from django.views.decorators.csrf import csrf_exempt -from django.utils.decorators import method_decorator -from django.shortcuts import render -from django.http import HttpResponse -from django.contrib.sessions.models import Session -from django.conf import settings - -from celery import shared_task -from requests import ConnectionError, HTTPError -from agavepy.agave import Agave, 
AgaveException - -from designsafe.apps.api.notifications.models import Notification - -from designsafe.apps.api.views import BaseApiView -from designsafe.apps.api.mixins import JSONResponseMixin, SecureMixin -from designsafe.apps.api.exceptions import ApiException - -from designsafe.apps.workspace.tasks import handle_webhook_request - -import json -import logging - -logger = logging.getLogger(__name__) - - - -class JobsWebhookView(JSONResponseMixin, BaseApiView): - """ - Dispatches notifications when receiving a POST request from the Agave - webhook service. - - """ - - @method_decorator(csrf_exempt) - def dispatch(self, *args, **kwargs): - return super(JobsWebhookView, self).dispatch(*args, **kwargs) - - def get(self, request, *args, **kwargs): - return HttpResponse(settings.WEBHOOK_POST_URL.strip('/') + '/api/notifications/wh/jobs/') - - def post(self, request, *args, **kwargs): - """ - Calls handle_webhook_request on webhook JSON body - to notify the user of the progress of the job. - - """ - - job = json.loads(request.body) - - handle_webhook_request(job) - return HttpResponse('OK') - - -class FilesWebhookView(SecureMixin, JSONResponseMixin, BaseApiView): - @method_decorator(csrf_exempt) - def dispatch(self, *args, **kwargs): - return super(FilesWebhookView, self).dispatch(*args, **kwargs) - - def post(self, request, *args, **kwargs): - notification = json.loads(request.body) - logger.debug(notification) - - return HttpResponse('OK') diff --git a/designsafe/apps/api/projects/tests.py b/designsafe/apps/api/projects/tests.py index c2c63069e7..0b844416a2 100644 --- a/designsafe/apps/api/projects/tests.py +++ b/designsafe/apps/api/projects/tests.py @@ -1,18 +1,20 @@ from designsafe.apps.api.projects.fixtures import exp_instance_meta, exp_instance_resp, exp_entity_meta, exp_entity_json import pytest +@pytest.mark.skip(reason="TODOv3: Update projects with Tapisv3") @pytest.mark.django_db -def test_project_instance_get(client, mock_agave_client, authenticated_user): - 
mock_agave_client.meta.getMetadata.return_value = exp_instance_meta +def test_project_instance_get(client, mock_tapis_client, authenticated_user): + mock_tapis_client.meta.getMetadata.return_value = exp_instance_meta resp = client.get('/api/projects/1052668239654088215-242ac119-0001-012/') actual = resp.json() expected = exp_instance_resp assert actual == expected +@pytest.mark.skip(reason="TODOv3: Update projects with Tapisv3") @pytest.mark.django_db -def test_project_meta_all(client, mock_agave_client, authenticated_user): - mock_agave_client.meta.getMetadata.return_value = exp_instance_meta - mock_agave_client.meta.listMetadata.return_value = exp_entity_meta +def test_project_meta_all(client, mock_tapis_client, authenticated_user): + mock_tapis_client.meta.getMetadata.return_value = exp_instance_meta + mock_tapis_client.meta.listMetadata.return_value = exp_entity_meta resp = client.get('/api/projects/1052668239654088215-242ac119-0001-012/meta/all/') actual = resp.json() expected = exp_entity_json diff --git a/designsafe/apps/api/projects_v2/conftest.py b/designsafe/apps/api/projects_v2/conftest.py new file mode 100644 index 0000000000..65d4d4a4ef --- /dev/null +++ b/designsafe/apps/api/projects_v2/conftest.py @@ -0,0 +1,65 @@ +"""Fixtures related to project metadata.""" +import pytest +from designsafe.apps.api.projects_v2 import constants +from designsafe.apps.api.projects_v2.operations.project_meta_operations import ( + create_project_metdata, + create_entity_metadata, + add_file_associations, + set_file_tags, + FileObj, +) +from designsafe.apps.api.projects_v2.operations.graph_operations import ( + initialize_project_graph, + add_node_to_project, +) + + +@pytest.fixture +def project_with_associations(regular_user): + """Project with associations fixture""" + project_value = { + "title": "Test Project", + "projectId": "PRJ-1234", + "users": [{"username": regular_user.username, "role": "pi"}], + "projectType": "experimental", + } + experiment_value = {"title": 
"Test Experiment", "description": "Experiment test"} + model_config_value = { + "title": "Test Entity", + "description": "Entity with file associations", + } + project = create_project_metdata(project_value) + initialize_project_graph("PRJ-1234") + + experiment = create_entity_metadata( + "PRJ-1234", name=constants.EXPERIMENT, value=experiment_value + ) + model_config = create_entity_metadata( + "PRJ-1234", name=constants.EXPERIMENT_MODEL_CONFIG, value=model_config_value + ) + + experiment_node = add_node_to_project( + "PRJ-1234", "NODE_ROOT", experiment.uuid, experiment.name + ) + add_node_to_project( + "PRJ-1234", experiment_node, model_config.uuid, model_config.name + ) + + file_objs = [ + FileObj( + system="project.system", name="file1", path="/path/to/file1", type="file" + ), + FileObj( + system="project.system", + name="file1", + path="/path/to/other/file1", + type="file", + ), + FileObj(system="project.system", name="dir1", path="/path/to/dir1", type="dir"), + ] + add_file_associations(model_config.uuid, file_objs) + set_file_tags(model_config.uuid, "/path/to/file1", ["test_tag"]) + set_file_tags(model_config.uuid, "/path/to/dir1/nested/file", ["test_tag"]) + set_file_tags(model_config.uuid, "/path/to/other/file1", ["test_tag"]) + + yield (project, experiment.uuid, project.uuid) diff --git a/designsafe/apps/api/projects_v2/migration_utils/graph_constructor.py b/designsafe/apps/api/projects_v2/migration_utils/graph_constructor.py index 5650bff5c5..0c0d9c2d80 100644 --- a/designsafe/apps/api/projects_v2/migration_utils/graph_constructor.py +++ b/designsafe/apps/api/projects_v2/migration_utils/graph_constructor.py @@ -3,10 +3,11 @@ import json from typing import TypedDict, Optional from uuid import uuid4 +import copy from pathlib import Path import networkx as nx from django.utils.text import slugify -from designsafe.apps.api.agave import service_account +from designsafe.apps.api.agave import get_service_account_client_v2 as service_account from 
designsafe.apps.data.models.elasticsearch import IndexedPublication from designsafe.apps.projects.managers.publication import FIELD_MAP from designsafe.apps.api.projects_v2.schema_models import PATH_SLUGS @@ -158,6 +159,7 @@ def construct_graph_recurse( entity_list: list[dict], parent: dict, parent_node_id: str, + allow_published_analysis=False, ): """Recurse through an entity's children and add nodes/edges. B is a child of A if all of A's descendants are referenced in B's association IDs.""" @@ -170,9 +172,17 @@ def construct_graph_recurse( ]: association_path.pop(-2) + # Account for legacy pubs that associated experimental analysis at top level + _allowed_relations = copy.deepcopy(ALLOWED_RELATIONS) + + if allow_published_analysis: + _allowed_relations[names.PROJECT].append(names.EXPERIMENT_ANALYSIS) + _allowed_relations[names.PROJECT].append(names.SIMULATION_ANALYSIS) + _allowed_relations[names.PROJECT].append(names.SIMULATION_REPORT) + children = filter( lambda child: ( - child["name"] in ALLOWED_RELATIONS.get(parent["name"], []) + child["name"] in _allowed_relations.get(parent["name"], []) and set(child["associationIds"]) >= set(association_path) ), entity_list, @@ -196,7 +206,13 @@ def construct_graph_recurse( } graph.add_node(child_node_id, **child_data) graph.add_edge(parent_node_id, child_node_id) - construct_graph_recurse(graph, entity_list, child, child_node_id) + construct_graph_recurse( + graph, + entity_list, + child, + child_node_id, + allow_published_analysis=allow_published_analysis, + ) def get_entities_from_publication(project_id: str, version=None): @@ -215,7 +231,9 @@ def get_entities_from_publication(project_id: str, version=None): return entity_list -def construct_publication_graph(project_id, version=None) -> nx.DiGraph: +def construct_publication_graph( + project_id, version=None, allow_published_analysis=False +) -> nx.DiGraph: """Construct a directed graph from a publications's association IDs.""" entity_listing = 
get_entities_from_publication(project_id, version=version) root_entity = entity_listing[0] @@ -225,7 +243,7 @@ def construct_publication_graph(project_id, version=None) -> nx.DiGraph: project_type = root_entity["value"]["projectType"] root_node_id = "NODE_ROOT" - if project_type == "other": + if project_type == "other" or project_type == "field_reconnaissance": root_node_data = {"uuid": None, "name": None, "projectType": "other"} else: root_node_data = { @@ -234,7 +252,7 @@ def construct_publication_graph(project_id, version=None) -> nx.DiGraph: "projectType": root_entity["value"]["projectType"], } pub_graph.add_node(root_node_id, **root_node_data) - if project_type == "other": + if project_type == "other" or project_type == "field_reconnaissance": base_node_data = { "uuid": root_entity["uuid"], "name": root_entity["name"], @@ -243,7 +261,13 @@ def construct_publication_graph(project_id, version=None) -> nx.DiGraph: base_node_id = f"NODE_{uuid4()}" pub_graph.add_node(base_node_id, **base_node_data) pub_graph.add_edge(root_node_id, base_node_id) - construct_graph_recurse(pub_graph, entity_listing, root_entity, root_node_id) + construct_graph_recurse( + pub_graph, + entity_listing, + root_entity, + root_node_id, + allow_published_analysis=allow_published_analysis, + ) pub_graph.nodes["NODE_ROOT"]["basePath"] = f"/{project_id}" # pub_graph = construct_entity_filepaths(entity_listing, pub_graph, version) @@ -325,7 +349,11 @@ def transform_pub_entities(project_id: str, version: Optional[int] = None): """Validate publication entities against their corresponding model.""" entity_listing = get_entities_from_publication(project_id, version=version) base_pub_meta = IndexedPublication.from_id(project_id, revision=version).to_dict() - pub_graph = construct_publication_graph(project_id, version) + schema_version = base_pub_meta.get("version", 1) + + pub_graph = construct_publication_graph( + project_id, version, allow_published_analysis=(schema_version == 1) + ) path_mappings = 
[] for _, node_data in pub_graph.nodes.items(): @@ -366,7 +394,7 @@ def transform_pub_entities(project_id: str, version: Optional[int] = None): ) else: pub_graph.nodes[pub]["version"] = 1 - pub_graph.nodes[pub]["publicationDate"] = str(base_pub_meta["created"]) + pub_graph.nodes[pub]["publicationDate"] = base_pub_meta["created"].isoformat() pub_graph.nodes[pub]["status"] = "published" return pub_graph, path_mappings diff --git a/designsafe/apps/api/projects_v2/migration_utils/project_db_ingest.py b/designsafe/apps/api/projects_v2/migration_utils/project_db_ingest.py index 6d968fed4e..be279047e1 100644 --- a/designsafe/apps/api/projects_v2/migration_utils/project_db_ingest.py +++ b/designsafe/apps/api/projects_v2/migration_utils/project_db_ingest.py @@ -124,9 +124,9 @@ def ingest_graphs(): def fix_authors(meta: ProjectMetadata): """Ensure that authors contain complete name/institution information.""" base_project = meta.base_project - + print(meta.project_id) def get_complete_author(partial_author): - if partial_author["name"] and not partial_author["guest"]: + if partial_author.get("name") and not partial_author.get("guest"): author_info = next( ( user @@ -143,7 +143,7 @@ def get_complete_author(partial_author): meta.value["authors"] = [ get_complete_author(author) for author in meta.value["authors"] - if author["authorship"] is True + if author.get("authorship", True) is True ] if meta.value.get("projectType") == "other": @@ -153,7 +153,7 @@ def get_complete_author(partial_author): meta.value["dataCollectors"] = [ get_complete_author(author) for author in meta.value["dataCollectors"] - if author["authorship"] is True + if author.get("authorship", True) is True ] schema_model = SCHEMA_MAPPING[meta.name] schema_model.model_validate(meta.value) diff --git a/designsafe/apps/api/projects_v2/migration_utils/publication_transforms.py b/designsafe/apps/api/projects_v2/migration_utils/publication_transforms.py index 0f794dfa12..88cd56a4b7 100644 --- 
a/designsafe/apps/api/projects_v2/migration_utils/publication_transforms.py +++ b/designsafe/apps/api/projects_v2/migration_utils/publication_transforms.py @@ -279,6 +279,8 @@ def transform_entity(entity: dict, base_pub_meta: dict, base_path: str): reprsentation of the `value` attribute.""" model = SCHEMA_MAPPING[entity["name"]] authors = entity["value"].get("authors", None) + if not authors: + authors = entity.get("authors", None) schema_version = base_pub_meta.get("version", 1) if authors and schema_version == 1: updated_authors = get_v1_authors(entity, base_pub_meta["users"]) @@ -287,6 +289,24 @@ def transform_entity(entity: dict, base_pub_meta: dict, base_path: str): fixed_authors = list(map(convert_v2_user, entity["authors"])) entity["value"]["authors"] = sorted(fixed_authors, key=lambda a: a["order"]) + data_collectors = entity["value"].get("dataCollectors", None) + if data_collectors: + fixed_data_collectors = [] + for collector in data_collectors: + if collector.get("guest", None): + fixed_data_collectors.append(collector) + else: + fixed_data_collectors.append( + {**collector, **get_user_info(collector["name"])} + ) + entity["value"]["dataCollectors"] = sorted( + fixed_data_collectors, key=lambda a: a["order"] + ) + + legacy_doi = entity.get("doi", None) + if legacy_doi: + entity["value"]["dois"] = [legacy_doi.lstrip("doi:")] + tombstone_uuids = base_pub_meta.get("tombstone", []) if entity["uuid"] in tombstone_uuids: entity["value"]["tombstone"] = True @@ -302,16 +322,18 @@ def transform_entity(entity: dict, base_pub_meta: dict, base_path: str): file_objs = entity.get("fileObjs", None) # Some legacy experiment/hybrid sim entities have file_objs incorrectly # populated from their children. In these cases, _filepaths is empty. 
- if file_objs and entity.get("_filePaths", None) != []: + if file_objs and not ( + entity.get("_filePaths", None) == [] + and ( + entity["name"] + in [ + "designsafe.project.experiment", + "designsafe.project.hybrid_simulation", + ] + ) + ): entity["value"]["fileObjs"] = file_objs # Avoid "fixing" tags for legacy projects that don't have tree-based file layouts - if entity["value"].get("fileTags", False): - # entity["value"]["fileTags"] = update_file_tag_paths( - # entity, base_path - # ) - entity["value"]["fileTags"] = update_file_tag_paths_legacy( - entity, base_path - ) # new_file_objs, path_mapping = update_file_objs( # entity, base_path, system_id=settings.PUBLISHED_SYSTEM @@ -323,6 +345,13 @@ def transform_entity(entity: dict, base_pub_meta: dict, base_path: str): entity["value"]["fileObjs"] = new_file_objs else: path_mapping = {} + + if entity["value"].get("fileTags", False): + # entity["value"]["fileTags"] = update_file_tag_paths( + # entity, base_path + # ) + entity["value"]["fileTags"] = update_file_tag_paths_legacy(entity, base_path) + validated_model = model.model_validate(entity["value"]) if getattr(validated_model, "project_type", None) == "other": diff --git a/designsafe/apps/api/projects_v2/operations/_tests/publish_unit_test.py b/designsafe/apps/api/projects_v2/operations/_tests/publish_unit_test.py index c8a222faea..8405870e28 100644 --- a/designsafe/apps/api/projects_v2/operations/_tests/publish_unit_test.py +++ b/designsafe/apps/api/projects_v2/operations/_tests/publish_unit_test.py @@ -22,56 +22,6 @@ ) -@pytest.fixture -def project_with_associations(): - project_value = { - "title": "Test Project", - "projectId": "PRJ-1234", - "users": [], - "projectType": "experimental", - } - experiment_value = {"title": "Test Experiment", "description": "Experiment test"} - model_config_value = { - "title": "Test Entity", - "description": "Entity with file associations", - } - project = create_project_metdata(project_value) - 
initialize_project_graph("PRJ-1234") - - experiment = create_entity_metadata( - "PRJ-1234", name=constants.EXPERIMENT, value=experiment_value - ) - model_config = create_entity_metadata( - "PRJ-1234", name=constants.EXPERIMENT_MODEL_CONFIG, value=model_config_value - ) - - experiment_node = add_node_to_project( - "PRJ-1234", "NODE_ROOT", experiment.uuid, experiment.name - ) - add_node_to_project( - "PRJ-1234", experiment_node, model_config.uuid, model_config.name - ) - - file_objs = [ - FileObj( - system="project.system", name="file1", path="/path/to/file1", type="file" - ), - FileObj( - system="project.system", - name="file1", - path="/path/to/other/file1", - type="file", - ), - FileObj(system="project.system", name="dir1", path="/path/to/dir1", type="dir"), - ] - add_file_associations(model_config.uuid, file_objs) - set_file_tags(model_config.uuid, "/path/to/file1", ["test_tag"]) - set_file_tags(model_config.uuid, "/path/to/dir1/nested/file", ["test_tag"]) - set_file_tags(model_config.uuid, "/path/to/other/file1", ["test_tag"]) - - yield (project, experiment.uuid, project.uuid) - - @pytest.mark.django_db def test_publication_subtree(project_with_associations): (project, exp_uuid, project_uuid) = project_with_associations diff --git a/designsafe/apps/api/projects_v2/operations/datacite_operations.py b/designsafe/apps/api/projects_v2/operations/datacite_operations.py index 21e4b48937..1f7e978ef5 100644 --- a/designsafe/apps/api/projects_v2/operations/datacite_operations.py +++ b/designsafe/apps/api/projects_v2/operations/datacite_operations.py @@ -97,6 +97,7 @@ def get_datacite_json( datacite_json["types"]["resourceType"] += f"/{location}" datacite_json["types"]["resourceTypeGeneral"] = "Dataset" + datacite_json["version"] = version datacite_json["descriptions"] = [ { diff --git a/designsafe/apps/api/projects_v2/operations/graph_operations.py b/designsafe/apps/api/projects_v2/operations/graph_operations.py index 9661234820..c8963bab5b 100644 --- 
a/designsafe/apps/api/projects_v2/operations/graph_operations.py +++ b/designsafe/apps/api/projects_v2/operations/graph_operations.py @@ -170,6 +170,23 @@ def remove_nodes_from_project(project_id: str, node_ids: list[str]): graph_model.save() +def remove_nodes_for_entity(project_id: str, entity_uuid: str): + """ + Remove an entity from the tree by deleting all nodes that point to its UUID. + """ + graph_model = ProjectMetadata.objects.get( + name=constants.PROJECT_GRAPH, base_project__value__projectId=project_id + ) + project_graph = nx.node_link_graph(graph_model.value) + nodes_to_remove = [ + node + for node in project_graph.nodes + if project_graph.nodes[node].get("uuid") == entity_uuid + ] + if nodes_to_remove: + remove_nodes_from_project(project_id, nodes_to_remove) + + def reorder_project_nodes(project_id: str, node_id: str, new_index: int): """Update the database entry for the project graph to reorder nodes.""" # Lock the project graph's tale row to prevent conflicting updates. diff --git a/designsafe/apps/api/projects_v2/operations/project_archive_operations.py b/designsafe/apps/api/projects_v2/operations/project_archive_operations.py index d94a4031fc..58b47b4feb 100644 --- a/designsafe/apps/api/projects_v2/operations/project_archive_operations.py +++ b/designsafe/apps/api/projects_v2/operations/project_archive_operations.py @@ -8,6 +8,7 @@ import zipfile from typing import Optional from django.conf import settings +from celery import shared_task from designsafe.apps.api.publications_v2.models import Publication logger = logging.getLogger(__name__) @@ -85,3 +86,9 @@ def create_metadata(): set_perms(arc_dir, 0o755) create_metadata() create_archive() + + +@shared_task +def archive_publication_async(project_id: str, version: Optional[str] = 1): + """async wrapper around archive""" + archive(project_id, version) diff --git a/designsafe/apps/api/projects_v2/operations/project_meta_operations.py 
b/designsafe/apps/api/projects_v2/operations/project_meta_operations.py index 45abdf3820..51e972b51f 100644 --- a/designsafe/apps/api/projects_v2/operations/project_meta_operations.py +++ b/designsafe/apps/api/projects_v2/operations/project_meta_operations.py @@ -7,6 +7,7 @@ FileObj, FileTag, PartialEntityWithFiles, + BaseProject, ) from designsafe.apps.api.projects_v2 import constants from designsafe.apps.api.projects_v2.models import ProjectMetadata @@ -57,6 +58,7 @@ def delete_entity(uuid: str): if entity.name in (constants.PROJECT, constants.PROJECT_GRAPH): raise ValueError("Cannot delete a top-level project or graph object.") entity.delete() + return "OK" @@ -72,9 +74,33 @@ def clear_entities(project_id): return "OK" +def get_changed_users(old_value: BaseProject, new_value: BaseProject): + """ + Diff users between incoming and existing project metadata to determine which users + need permissions to be added/removed via Tapis. + """ + old_users = set( + (u.username for u in old_value.users if u.username and u.role != "guest") + ) + new_users = set( + (u.username for u in new_value.users if u.username and u.role != "guest") + ) + + users_to_add = list(new_users - old_users) + users_to_remove = list(old_users - new_users) + + return users_to_add, users_to_remove + + def change_project_type(project_id, new_value): """Change the type of a project and update its value.""" project = ProjectMetadata.get_project_by_id(project_id) + + # persist Hazmapper maps when changing project type + hazmapper_maps = project.value.get("hazmapperMaps", None) + if hazmapper_maps: + new_value["hazmapperMaps"] = hazmapper_maps + schema_model = SCHEMA_MAPPING[constants.PROJECT] validated_model = schema_model.model_validate(new_value) project.value = validated_model.model_dump() @@ -138,6 +164,20 @@ def remove_file_associations(uuid: str, file_paths: list[str]): filtered_file_objs = _filter_file_objs(entity_file_model.file_objs, file_paths) entity.value["fileObjs"] = [f.model_dump() for f 
in filtered_file_objs] + + # Remove tags associated with these entity/file path combinations. + tagged_paths = [] + for path in file_paths: + tagged_paths += [ + t["path"] + for t in entity.value.get("fileTags", []) + if t["path"].startswith(path) + ] + entity.value["fileTags"] = [ + t + for t in entity.value.get("fileTags", []) + if not (t["path"] in tagged_paths) + ] entity.save() return entity diff --git a/designsafe/apps/api/projects_v2/operations/project_publish_operations.py b/designsafe/apps/api/projects_v2/operations/project_publish_operations.py index dbcc8ef511..acdfa19f05 100644 --- a/designsafe/apps/api/projects_v2/operations/project_publish_operations.py +++ b/designsafe/apps/api/projects_v2/operations/project_publish_operations.py @@ -9,6 +9,7 @@ import logging from django.conf import settings import networkx as nx +from celery import shared_task from designsafe.apps.api.projects_v2 import constants from designsafe.apps.api.projects_v2.models.project_metadata import ProjectMetadata @@ -17,8 +18,12 @@ publish_datacite_doi, upsert_datacite_json, ) - +from designsafe.apps.api.projects_v2.operations.project_archive_operations import ( + archive_publication_async, +) from designsafe.apps.api.publications_v2.models import Publication +from designsafe.apps.api.publications_v2.elasticsearch import index_publication +from designsafe.apps.data.tasks import agave_indexer logger = logging.getLogger(__name__) @@ -45,8 +50,32 @@ constants.HYBRID_SIM_SIM_SUBSTRUCTURE, constants.HYBRID_SIM_EXP_SUBSTRUCTURE, ], + constants.FIELD_RECON_REPORT: [], } +ENTITIES_WITH_REQUIRED_FILES = [ + constants.EXPERIMENT_MODEL_CONFIG, + constants.EXPERIMENT_SENSOR, + constants.EXPERIMENT_EVENT, + constants.SIMULATION_MODEL, + constants.SIMULATION_INPUT, + constants.SIMULATION_OUTPUT, + constants.SIMULATION_REPORT, + constants.FIELD_RECON_SOCIAL_SCIENCE, + constants.FIELD_RECON_GEOSCIENCE, + constants.FIELD_RECON_PLANNING, + constants.HYBRID_SIM_GLOBAL_MODEL, + 
constants.HYBRID_SIM_COORDINATOR, + constants.HYBRID_SIM_SIM_SUBSTRUCTURE, + constants.HYBRID_SIM_EXP_SUBSTRUCTURE, + constants.HYBRID_SIM_COORDINATOR_OUTPUT, + constants.HYBRID_SIM_EXP_OUTPUT, + constants.HYBRID_SIM_SIM_OUTPUT, + constants.FIELD_RECON_REPORT, + constants.HYBRID_SIM_REPORT, + constants.HYBRID_SIM_ANALYSIS, +] + def check_missing_entities( project_id: str, entity_uuid: str, default_operator: Literal["AND", "OR"] = "AND" @@ -57,8 +86,7 @@ def check_missing_entities( (e.g. field recon missions require a planning, social science, OR geoscience colleciton but not all 3) """ - project_tree = ProjectMetadata.get_project_by_id(project_id) - project_graph: nx.DiGraph = nx.node_link_graph(project_tree.project_graph.value) + project_graph: nx.DiGraph = add_values_to_tree(project_id) entity_node = next( ( @@ -74,6 +102,7 @@ def check_missing_entities( project_graph.nodes[node] for node in nx.dfs_preorder_nodes(project_graph, entity_node) ] + logger.debug(child_nodes) missing_entities = [] for required_entity_name in REQUIRED_ENTITIES.get(entity_name, []): @@ -90,7 +119,17 @@ def check_missing_entities( # At least one of the required entity types is associated missing_entities = [] - return missing_entities + # Check for entities with missing files: + missing_file_objs = [] + for child_node in child_nodes: + if child_node["name"] in ENTITIES_WITH_REQUIRED_FILES and not child_node[ + "value" + ].get("fileObjs", []): + missing_file_objs.append( + {"name": child_node["name"], "title": child_node["value"]["title"]} + ) + + return missing_entities, missing_file_objs def validate_entity_selection(project_id: str, entity_uuids: list[str]): @@ -100,7 +139,9 @@ def validate_entity_selection(project_id: str, entity_uuids: list[str]): entity_meta = ProjectMetadata.objects.get(uuid=uuid) match entity_meta.name: case constants.EXPERIMENT | constants.SIMULATION | constants.HYBRID_SIM: - missing_entities = check_missing_entities(project_id, uuid) + missing_entities, 
missing_file_objs = check_missing_entities( + project_id, uuid + ) if len(missing_entities) > 0: validation_errors.append( { @@ -110,8 +151,17 @@ def validate_entity_selection(project_id: str, entity_uuids: list[str]): "missing": missing_entities, } ) - case constants.FIELD_RECON_MISSION: - missing_entities = check_missing_entities( + for missing_file_obj in missing_file_objs: + validation_errors.append( + { + "errorType": "MISSING_FILES", + "name": missing_file_obj["name"], + "title": missing_file_obj["title"], + } + ) + + case constants.FIELD_RECON_MISSION | constants.FIELD_RECON_REPORT: + missing_entities, missing_file_objs = check_missing_entities( project_id, uuid, default_operator="OR" ) if len(missing_entities) > 0: @@ -123,6 +173,14 @@ def validate_entity_selection(project_id: str, entity_uuids: list[str]): "missing": missing_entities, } ) + for missing_file_obj in missing_file_objs: + validation_errors.append( + { + "errorType": "MISSING_FILES", + "name": missing_file_obj["name"], + "title": missing_file_obj["title"], + } + ) return validation_errors @@ -315,40 +373,52 @@ def copy_publication_files( `path_mapping` is a dict mapping project paths to their corresponding paths in the published area. 
""" - pub_dirname = project_id - if version and version > 1: - pub_dirname = f"{project_id}v{version}" - - pub_root_dir = str(Path(f"{settings.DESIGNSAFE_PUBLISHED_PATH}") / pub_dirname) - os.makedirs(pub_root_dir, exist_ok=True) - - for src_path in path_mapping: - src_path_obj = Path(src_path) - if not src_path_obj.exists(): - raise ProjectFileNotFound(f"File not found: {src_path}") - - os.makedirs(src_path_obj.parent, exist_ok=True) - - if src_path_obj.is_dir(): - shutil.copytree( - src_path, - path_mapping[src_path], - dirs_exist_ok=True, - symlinks=True, - copy_function=shutil.copy, - ) - else: - shutil.copy(src_path, path_mapping[src_path]) - - # Lock the publication directory so that non-root users can only read files and list directories - subprocess.run(["chmod", "-R", "a-x,a=rX", pub_root_dir], check=True) + os.chmod("/corral-repl/tacc/NHERI/published", 0o755) + try: + pub_dirname = project_id + if version and version > 1: + pub_dirname = f"{project_id}v{version}" + + pub_root_dir = str(Path(f"{settings.DESIGNSAFE_PUBLISHED_PATH}") / pub_dirname) + os.makedirs(pub_root_dir, exist_ok=True) + + for src_path in path_mapping: + src_path_obj = Path(src_path) + if not src_path_obj.exists(): + raise ProjectFileNotFound(f"File not found: {src_path}") + + dest_path_obj = Path(path_mapping[src_path]) + os.makedirs(dest_path_obj.parent, exist_ok=True) + + if src_path_obj.is_dir(): + shutil.copytree( + src_path, + path_mapping[src_path], + dirs_exist_ok=True, + copy_function=shutil.copy, + ) + else: + shutil.copy(src_path, path_mapping[src_path]) + + # Lock the publication directory so that non-root users can only read files and list directories + subprocess.run(["chmod", "-R", "a-x,a=rX", pub_root_dir], check=True) + agave_indexer.apply_async( + kwargs={ + "systemId": "designsafe.storage.published", + "filePath": pub_dirname, + "recurse": True, + }, + queue="indexing", + ) + finally: + os.chmod("/corral-repl/tacc/NHERI/published", 0o555) # pylint: 
disable=too-many-locals, too-many-branches, too-many-statements def publish_project( project_id: str, entity_uuids: list[str], - version: Optional[int] = None, + version: Optional[int] = 1, version_info: Optional[str] = None, dry_run: bool = False, ): @@ -369,6 +439,10 @@ def publish_project( if dry_run: return pub_tree, path_mapping + if not settings.DEBUG: + # Copy files first so if it fails we don't create orphan metadata/datacite entries. + copy_publication_files(path_mapping, project_id, version=version) + new_dois = [] for entity_uuid in entity_uuids: @@ -376,7 +450,7 @@ def publish_project( existing_dois = entity_meta.value.get("dois", []) existing_doi = next(iter(existing_dois), None) - datacite_json = get_datacite_json(pub_tree, entity_uuid) + datacite_json = get_datacite_json(pub_tree, entity_uuid, version) datacite_resp = upsert_datacite_json(datacite_json, doi=existing_doi) doi = datacite_resp["data"]["id"] new_dois.append(doi) @@ -393,7 +467,6 @@ def publish_project( pub_tree.nodes[node]["value"]["dois"] = [doi] if not settings.DEBUG: - copy_publication_files(path_mapping, project_id) for doi in new_dois: publish_datacite_doi(doi) @@ -413,4 +486,85 @@ def publish_project( ) pub_metadata.save() + index_publication(project_id) + if not settings.DEBUG: + archive_publication_async.apply_async( + args=[project_id, version], queue="default" + ) + return pub_metadata + + +@shared_task +def publish_project_async( + project_id: str, + entity_uuids: list[str], + version: Optional[int] = 1, + version_info: Optional[str] = None, + dry_run: bool = False, +): + """Async wrapper arount publication""" + publish_project(project_id, entity_uuids, version, version_info, dry_run) + + +def amend_publication(project_id: str): + """ + Update metadata values in a publication to match the latest changes made in the + underlying project. Does NOT affect file associations or tags. 
+ """ + + pub_root = Publication.objects.get(project_id=project_id) + pub_tree: nx.DiGraph = nx.node_link_graph(pub_root.tree) + latest_version = max( + pub_tree.nodes[node]["version"] for node in pub_tree.successors("NODE_ROOT") + ) + pubs_to_amend = [ + node + for node in pub_tree.successors("NODE_ROOT") + if pub_tree.nodes[node]["version"] == latest_version + ] + + for pub_node in pubs_to_amend: + for node in nx.dfs_preorder_nodes(pub_tree, pub_node): + uuid = pub_tree.nodes[node]["uuid"] + published_meta_value = pub_tree.nodes[node]["value"] + try: + prj_meta_value = ProjectMetadata.objects.get(uuid=uuid).value + prj_meta_value.pop("fileObjs", None) + prj_meta_value.pop("fileTags", None) + amended_meta_value = {**published_meta_value, **prj_meta_value} + pub_tree.nodes[node]["value"] = amended_meta_value + except ProjectMetadata.DoesNotExist: + continue + + base_prj_meta_value = ProjectMetadata.get_project_by_id(project_id).value + base_prj_meta_value.pop("fileObjs", None) + base_prj_meta_value.pop("fileTags", None) + + # If not type Other, we also amend the NODE_ROOT metadata. 
+ if pub_tree.nodes["NODE_ROOT"].get("uuid", None): + base_published_meta_value = pub_tree.nodes["NODE_ROOT"]["value"] + amended_root_meta_value = {**base_published_meta_value, **base_prj_meta_value} + pub_tree.nodes["NODE_ROOT"]["value"] = amended_root_meta_value + + pub_root.tree = nx.node_link_data(pub_tree) + pub_root.value = {**pub_root.value, **base_prj_meta_value} + pub_root.save() + + # Update datacite metadata + for node in pubs_to_amend: + datacite_json = get_datacite_json( + pub_tree, pub_tree.nodes[node]["uuid"], latest_version + ) + upsert_datacite_json( + datacite_json, doi=pub_tree.nodes[node]["value"]["dois"][0] + ) + + # Index publication in Elasticsearch + index_publication(project_id) + + +@shared_task +def amend_publication_async(project_id: str): + """async wrapper around amend_publication""" + amend_publication(project_id) diff --git a/designsafe/apps/api/projects_v2/operations/project_system_operations.py b/designsafe/apps/api/projects_v2/operations/project_system_operations.py index af5e2966dc..367323e40f 100644 --- a/designsafe/apps/api/projects_v2/operations/project_system_operations.py +++ b/designsafe/apps/api/projects_v2/operations/project_system_operations.py @@ -1 +1,217 @@ """Utilities for creating project systems and managing access permissions.""" + +# from portal.utils.encryption import createKeyPair +import logging +from typing import Literal +from tapipy.tapis import Tapis +from django.conf import settings +import celery +from designsafe.apps.api.agave import service_account +from celery import shared_task + +# from portal.apps.onboarding.steps.system_access_v3 import create_system_credentials, register_public_key + + +logger = logging.getLogger(__name__) + + +def set_workspace_permissions( + client: Tapis, username: str, system_id: str, role: str = "writer" +): + """Apply read/write/execute permissions to a user on a system.""" + + system_pems = {"reader": ["READ", "EXECUTE"], "writer": ["READ", "EXECUTE"]} + + files_pems = 
{"reader": "READ", "writer": "MODIFY"} + + logger.info(f"Adding {username} permissions to Tapis system {system_id}") + client.systems.grantUserPerms( + systemId=system_id, userName=username, permissions=system_pems[role] + ) + + if role == "reader": + client.systems.revokeUserPerms( + systemId=system_id, userName=username, permissions=["MODIFY"] + ) + client.files.deletePermissions(systemId=system_id, path="/", username=username) + + client.files.grantPermissions( + systemId=system_id, path="/", username=username, permission=files_pems[role] + ) + + +def set_workspace_acls(client, system_id, path, username, operation, role="writer"): + operation_map = {"add": "ADD", "remove": "REMOVE"} + + acl_string_map = { + "reader": f"d:u:{username}:rX,u:{username}:rX", + "writer": f"d:u:{username}:rwX,u:{username}:rwX", + "none": f"d:u:{username},u:{username}", + } + + client.files.setFacl( + systemId=system_id, + path=path, + operation=operation_map[operation], + recursionMethod="PHYSICAL", + aclString=acl_string_map[role], + ) + + +def submit_workspace_acls_job( + username: str, project_uuid: str, action=Literal["add", "remove"] +): + """ + Submit a job to set ACLs on a project for a specific user. This should be used if + we are setting ACLs on an existing project, since there might be too many files for + the synchronous Tapis endpoint to be performant. 
+ """ + client = service_account() + + job_body = { + "name": f"setfacl-project-{project_uuid.split('-')[0]}-{username}-{action}", + "appId": "setfacl-corral-tg458981", + "appVersion": "0.0.1", + "description": "test app setfacl-corral-tg458981", + "fileInputs": [], + "parameterSet": { + "appArgs": [], + "schedulerOptions": [], + "envVariables": [ + {"key": "username", "value": username}, + { + "key": "directory", + "value": f"/corral/projects/NHERI/projects/{project_uuid}", + }, + {"key": "action", "value": action}, + ], + }, + "tags": ["portalName:designsafe"], + } + res = client.jobs.submitJob(**job_body) + return res + + +def create_workspace_dir(project_uuid: str) -> str: + client = service_account() + system_id = "designsafe.storage.projects" + path = f"{project_uuid}" + client.files.mkdir(systemId=system_id, path=path) + return path + + +def create_workspace_system(client, project_uuid: str) -> str: + system_id = f"project-{project_uuid}" + system_args = { + "id": system_id, + "host": "cloud.corral.tacc.utexas.edu", + "port": 22, + "systemType": "LINUX", + "defaultAuthnMethod": "PKI_KEYS", + "canExec": False, + "rootDir": f"/corral-repl/projects/NHERI/projects/{project_uuid}", + "effectiveUserId": "tg458981", + "authnCredential": { + "privateKey": settings.PROJECT_STORAGE_SYSTEM_CREDENTIALS["privateKey"], + "publicKey": settings.PROJECT_STORAGE_SYSTEM_CREDENTIALS["username"], + }, + } + + client.systems.createSystem(**system_args) + return system_id + + +def increment_workspace_count(force=None) -> int: + client = service_account() + root_sys = client.systems.getSystem(systemId="designsafe.storage.projects") + new_count = int(root_sys.notes.count) + 1 + + # Allow manual setting of the increment. 
+ if force: + new_count = force + + client.systems.patchSystem( + systemId="designsafe.storage.projects", notes={"count": new_count} + ) + return new_count + + +########################################## +# HIGH-LEVEL OPERATIONS TIED TO API ROUTES +########################################## + + +def setup_project_file_system(project_uuid: str, users: list[str]): + """ + Create a workspace system owned by user whose client is passed. + """ + service_client = service_account() + + # Service client creates directory and gives owner write permissions + create_workspace_dir(project_uuid) + + # User creates the system and adds their credential + resp = create_workspace_system(service_client, project_uuid) + + for username in users: + add_user_to_project_async.apply_async(args=[project_uuid, username]) + + return resp + + +def add_user_to_project(project_uuid: str, username: str, set_acls=True): + """ + Give a user POSIX and Tapis permissions on a workspace system. + """ + service_client = service_account() + system_id = f"project-{project_uuid}" + logger.debug("Adding user %s to system %s", username, system_id) + if set_acls: + job_res = submit_workspace_acls_job(username, project_uuid, action="add") + logger.debug( + "Submitted workspace ACL job %s with UUID %s", job_res.name, job_res.uuid + ) + service_client.systems.shareSystem(systemId=system_id, users=[username]) + set_workspace_permissions(service_client, username, system_id, role="writer") + + return project_uuid + + +def remove_user_from_project(project_uuid: str, username: str): + """ + Unshare the system and remove all permissions and credentials. 
+ """ + service_client = service_account() + system_id = f"project-{project_uuid}" + logger.debug("Removing user %s from system %s", username, system_id) + job_res = submit_workspace_acls_job(username, project_uuid, action="remove") + logger.debug( + "Submitted workspace ACL job %s with UUID %s", job_res.name, job_res.uuid + ) + + service_client.systems.unShareSystem(systemId=system_id, users=[username]) + service_client.systems.revokeUserPerms( + systemId=system_id, userName=username, permissions=["READ", "MODIFY", "EXECUTE"] + ) + service_client.files.deletePermissions( + systemId=system_id, username=username, path="/" + ) + + return project_uuid + + +########################################## +# ASYNC TASKS FOR USER ADDITION/REMOVAL +########################################## + + +@shared_task(bind=True) +def add_user_to_project_async(self, project_uuid: str, username: str): + """Async wrapper around add_user_to_project""" + add_user_to_project(project_uuid, username) + + +@shared_task(bind=True) +def remove_user_from_project_async(self, project_uuid: str, username: str): + """Async wrapper around remove_user_from_project""" + remove_user_from_project(project_uuid, username) diff --git a/designsafe/apps/api/projects_v2/schema_models/_field_models.py b/designsafe/apps/api/projects_v2/schema_models/_field_models.py index d138b305d1..69223c3a6a 100644 --- a/designsafe/apps/api/projects_v2/schema_models/_field_models.py +++ b/designsafe/apps/api/projects_v2/schema_models/_field_models.py @@ -26,6 +26,10 @@ def model_dump(self, *args, **kwargs): *args, **kwargs ) + def to_fedora_json(self) -> dict: + """Placeholder method for formatting metadata for Fedora.""" + return {} + class ProjectUser(MetadataModel): """Model for project users.""" @@ -56,7 +60,7 @@ def from_username(cls, username: str, role: str = "team_member", **kwargs): email=user_obj.email, inst=user_obj.profile.institution, role=role, - **kwargs + **kwargs, ) except user_model.DoesNotExist: try: @@ -71,7 
+75,7 @@ def from_username(cls, username: str, role: str = "team_member", **kwargs): email=tas_user["email"], inst=tas_user["institution"], role=role, - **kwargs + **kwargs, ) # pylint:disable=broad-exception-caught except Exception as _: @@ -96,9 +100,9 @@ class ProjectAward(MetadataModel): """Model for awards.""" order: int = 0 - name: Annotated[ - str, BeforeValidator(lambda n: n if isinstance(n, str) else "") - ] = "" + name: Annotated[str, BeforeValidator(lambda n: n if isinstance(n, str) else "")] = ( + "" + ) number: str = "" funding_source: Optional[str] = None @@ -117,6 +121,15 @@ class AssociatedProject(MetadataModel): # Some legacy projects have a doi attribute. doi: str = "" + def to_fedora_json(self) -> dict: + if self.type == "Linked Dataset": + return {"isPartOf": f"{self.title} ({self.href})"} + if self.type == "Context": + return {"references": f"{self.title} ({self.href})"} + if self.type == "Cited By": + return {"isReferencedBy": f"{self.title} ({self.href})"} + return {} + class ReferencedWork(MetadataModel): """Model for referenced works.""" @@ -125,6 +138,9 @@ class ReferencedWork(MetadataModel): doi: str = Field(validation_alias=AliasChoices("doi", "url")) href_type: str = "URL" + def to_fedora_json(self): + return {"references": f"{self.title} ({self.doi})"} + class FileTag(MetadataModel): """Model for file tags.""" diff --git a/designsafe/apps/api/projects_v2/schema_models/base.py b/designsafe/apps/api/projects_v2/schema_models/base.py index ad39660b79..d042b8d97b 100644 --- a/designsafe/apps/api/projects_v2/schema_models/base.py +++ b/designsafe/apps/api/projects_v2/schema_models/base.py @@ -1,4 +1,5 @@ """Pydantic schema models for base-level project entities.""" + from datetime import datetime from typing import Literal, Optional, Annotated from pydantic import ( @@ -173,3 +174,63 @@ def post_validate(self): if (not self.users) and (users := self.construct_users()): self.users = users return self + + def to_fedora_json(self): + """format 
project metadata for Fedora""" + fedora_json = {} + fedora_json["title"] = self.title + fedora_json["description"] = self.description + fedora_json["identifier"] = [ + self.project_id, + f"https://www.designsafe-ci.org/data/browser/public/designsafe.storage.published/{self.project_id}", + ] + if self.dois: + fedora_json["identifier"] += self.dois + + fedora_json["coverage"] = [] + for nh_event in self.nh_events: + if nh_event.event_start: + fedora_json["coverage"].append(nh_event.event_start.isoformat()) + if nh_event.event_end: + fedora_json["coverage"].append(nh_event.event_end.isoformat()) + fedora_json["coverage"].append(nh_event.location) + + fedora_json["subject"] = self.keywords + if self.nh_event: + fedora_json["subject"].append(self.nh_event) + if self.fr_types: + fedora_json["subject"] += [t.name for t in self.fr_types] + if self.nh_types: + fedora_json["subject"] += [t.name for t in self.nh_types] + fedora_json["subject"] = [s for s in fedora_json["subject"] if s] + + fedora_json["contributors"] = [] + for award in self.award_numbers: + fedora_json["contributors"].append(award.name) + fedora_json["contributors"].append(award.number) + for facility in self.facilities: + fedora_json["contributors"].append(facility.name) + fedora_json["contributors"] = [c for c in fedora_json["contributors"] if c] + + fedora_json["type"] = self.project_type + if self.project_type == "other": + fedora_json["type"] = [t.name for t in self.data_types] + + fedora_json["creator"] = [ + f"{author.lname}, {author.fname}" for author in self.authors + ] + if self.license: + fedora_json["license"] = self.license + fedora_json["publisher"] = "Designsafe" + + for referenced_data in self.referenced_data: + reference_mapping = referenced_data.to_fedora_json() + for key in reference_mapping: + fedora_json[key] = fedora_json.get(key, []) + [reference_mapping[key]] + + for related_work in self.associated_projects: + related_mapping = related_work.to_fedora_json() + for key in 
related_mapping: + fedora_json[key] = fedora_json.get(key, []) + [related_mapping[key]] + + return fedora_json diff --git a/designsafe/apps/api/projects_v2/schema_models/experimental.py b/designsafe/apps/api/projects_v2/schema_models/experimental.py index 491ef6ce48..07a60bc3c8 100644 --- a/designsafe/apps/api/projects_v2/schema_models/experimental.py +++ b/designsafe/apps/api/projects_v2/schema_models/experimental.py @@ -1,4 +1,5 @@ """Pydantic models for Experimental entities""" + import itertools from typing import Optional, Annotated from pydantic import BeforeValidator, Field, ConfigDict, model_validator, AliasChoices @@ -57,8 +58,8 @@ class Experiment(MetadataModel): ] = None equipment_type_other: str = Field(default="", exclude=True) - procedure_start: str = "" - procedure_end: str = "" + procedure_start: Optional[str] = None + procedure_end: Optional[str] = None authors: Annotated[list[ProjectUser], BeforeValidator(handle_legacy_authors)] = [] project: list[str] = [] @@ -89,6 +90,40 @@ def handle_other(self): self.facility.name = self.experimental_facility_other return self + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + fedora_json = { + "title": self.title, + "description": self.description, + "publisher": "Designsafe", + } + fedora_json["creator"] = [ + f"{author.lname}, {author.fname}" for author in self.authors + ] + if self.experiment_type: + fedora_json["type"] = self.experiment_type.name + fedora_json["identifier"] = self.dois + if self.facility: + fedora_json["contributor"] = self.facility.name + + if self.equipment_type: + fedora_json["subject"] = self.equipment_type.name + + if self.procedure_start: + fedora_json["_created"] = self.procedure_start + + for referenced_data in self.referenced_data: + reference_mapping = referenced_data.to_fedora_json() + for key in reference_mapping: + fedora_json[key] = fedora_json.get(key, []) + [reference_mapping[key]] + + for related_work in self.related_work: + 
related_mapping = related_work.to_fedora_json() + for key in related_mapping: + fedora_json[key] = fedora_json.get(key, []) + [related_mapping[key]] + + return fedora_json + class ExperimentModelConfig(MetadataModel): """Model for model configurations.""" @@ -113,6 +148,14 @@ class ExperimentModelConfig(MetadataModel): spatial: Optional[str] = Field(default=None, exclude=True) coverage: Optional[str] = Field(default=None, exclude=True) + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return { + "type": "model configuration", + "title": self.title, + "description": self.description, + } + class ExperimentSensor(MetadataModel): """Model for sensors.""" @@ -142,6 +185,14 @@ class ExperimentSensor(MetadataModel): # This field ONLY Present on sensor 8078182091498319385-242ac11c-0001-012 load: Optional[list[str]] = Field(default=None, exclude=True) + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return { + "type": "sensor information", + "title": self.title, + "description": self.description, + } + class ExperimentEvent(MetadataModel): """Model for experimental events.""" @@ -164,6 +215,10 @@ class ExperimentEvent(MetadataModel): tags: Optional[dict] = Field(default=None, exclude=True) load: Optional[list[str]] = Field(default=None, exclude=True) + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return {"type": "event", "title": self.title, "description": self.description} + class ExperimentAnalysis(MetadataModel): """Model for experimental analysis.""" @@ -182,10 +237,20 @@ class ExperimentAnalysis(MetadataModel): file_tags: list[FileTag] = [] file_objs: list[FileObj] = [] + dois: list[str] = [] + tags: Optional[dict] = Field(default=None, exclude=True) reference: Optional[str] = Field(default=None, exclude=True) referencedoi: Optional[str] = Field(default=None, exclude=True) + def to_fedora_json(self): + """Metadata representation for the Fedora 
repository""" + return { + "type": "analysis", + "title": self.title, + "description": self.description, + } + class ExperimentReport(MetadataModel): """Model for experimental reports.""" @@ -198,3 +263,7 @@ class ExperimentReport(MetadataModel): files: list[str] = [] file_tags: list[FileTag] = [] file_objs: list[FileObj] = [] + + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return {"type": "report", "title": self.title, "description": self.description} diff --git a/designsafe/apps/api/projects_v2/schema_models/field_recon.py b/designsafe/apps/api/projects_v2/schema_models/field_recon.py index f4e48fa646..3c533cd978 100644 --- a/designsafe/apps/api/projects_v2/schema_models/field_recon.py +++ b/designsafe/apps/api/projects_v2/schema_models/field_recon.py @@ -1,4 +1,5 @@ """Pydantic schema models for Field Recon entities""" + from typing import Annotated, Optional import itertools from pydantic import BeforeValidator, Field, AliasChoices @@ -34,8 +35,8 @@ class Mission(MetadataModel): related_work: list[AssociatedProject] = [] event: str = "" - date_start: str = "" - date_end: str = "" + date_start: Optional[str] = None + date_end: Optional[str] = None location: str = "" latitude: str = "" longitude: str = "" @@ -49,6 +50,41 @@ class Mission(MetadataModel): # Deprecate these later facility: Optional[DropdownValue] = None + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + fedora_json = { + "title": self.title, + "description": self.description, + "publisher": "Designsafe", + } + fedora_json["creator"] = [ + f"{author.lname}, {author.fname}" for author in self.authors + ] + + fedora_json["coverage"] = [] + if self.date_start: + fedora_json["coverage"].append(self.date_start) + if self.date_end: + fedora_json["coverage"].append(self.date_end) + if self.location: + fedora_json["coverage"].append(self.location) + + fedora_json["identifier"] = self.dois + if self.facility: + 
fedora_json["contributor"] = self.facility.name + + for referenced_data in self.referenced_data: + reference_mapping = referenced_data.to_fedora_json() + for key in reference_mapping: + fedora_json[key] = fedora_json.get(key, []) + [reference_mapping[key]] + + for related_work in self.related_work: + related_mapping = related_work.to_fedora_json() + for key in related_mapping: + fedora_json[key] = fedora_json.get(key, []) + [related_mapping[key]] + + return fedora_json + class FieldReconReport(MetadataModel): """Model for field recon reports.""" @@ -62,9 +98,9 @@ class FieldReconReport(MetadataModel): related_work: list[AssociatedProject] = [] file_tags: list[FileTag] = [] - authors: Annotated[ - list[ProjectUser], BeforeValidator(handle_legacy_authors) - ] = Field(default=[], validation_alias=AliasChoices("authors", "dataCollectors")) + authors: Annotated[list[ProjectUser], BeforeValidator(handle_legacy_authors)] = ( + Field(default=[], validation_alias=AliasChoices("authors", "dataCollectors")) + ) guest_data_collectors: list[str] = [] project: list[str] = [] @@ -79,6 +115,34 @@ class FieldReconReport(MetadataModel): tombstone: bool = False + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + fedora_json = { + "title": self.title, + "description": self.description, + "publisher": "Designsafe", + } + # pylint:disable=not-an-iterable + fedora_json["creator"] = [ + f"{author.lname}, {author.fname}" for author in self.authors + ] + + fedora_json["identifier"] = self.dois + if self.facility: + fedora_json["contributor"] = self.facility.name + + for referenced_data in self.referenced_data: + reference_mapping = referenced_data.to_fedora_json() + for key in reference_mapping: + fedora_json[key] = fedora_json.get(key, []) + [reference_mapping[key]] + + for related_work in self.related_work: + related_mapping = related_work.to_fedora_json() + for key in related_mapping: + fedora_json[key] = fedora_json.get(key, []) + 
[related_mapping[key]] + + return fedora_json + class Instrument(MetadataModel): """model for instruments used in field recon projects.""" @@ -93,9 +157,12 @@ class FieldReconCollection(MetadataModel): title: str description: str = "" - observation_types: list[str | None] = [] - date_start: str = "" - date_end: str = "" + observation_types: Annotated[ + list[DropdownValue], + BeforeValidator(lambda v: handle_dropdown_values(v, FR_OBSERVATION_TYPES)), + ] = [] + date_start: Optional[str] = None + date_end: Optional[str] = None data_collectors: list[ProjectUser] = [] guest_data_collectors: list[str] = [] @@ -124,9 +191,9 @@ class SocialScienceCollection(MetadataModel): unit: str = "" modes: Annotated[list[str], BeforeValidator(handle_array_of_none)] = [] sample_approach: Annotated[list[str], BeforeValidator(handle_array_of_none)] = [] - sample_size: str + sample_size: str = "" date_start: str - date_end: str + date_end: Optional[str] = None data_collectors: list[ProjectUser] = [] location: str = "" latitude: str = "" @@ -148,6 +215,41 @@ class SocialScienceCollection(MetadataModel): # Deprecated test fields methods: list[str | None] = Field(default=[], exclude=True) + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + fedora_json = { + "type": "Social Science/dataset", + "title": self.title, + "description": self.description, + } + fedora_json["subject"] = [] + if self.unit: + fedora_json["subject"] += self.unit + if self.modes: + fedora_json["subject"].append(self.modes) + if self.sample_approach: + fedora_json["subject"] += self.sample_approach + if self.sample_size: + fedora_json["subject"].append(self.sample_size) + for equipment in self.equipment: + fedora_json["subject"].append(equipment.name) + + fedora_json["coverage"] = [] + if self.date_start: + fedora_json["coverage"].append(self.date_start) + if self.date_end: + fedora_json["coverage"].append(self.date_end) + if self.location: + 
fedora_json["coverage"].append(self.location) + + if self.restriction: + fedora_json["accessRights"] = self.restriction + + fedora_json["contributor"] = [ + f"{author.lname}, {author.fname}" for author in self.data_collectors + ] + return fedora_json + class PlanningCollection(MetadataModel): """Model for planning collections.""" @@ -165,6 +267,18 @@ class PlanningCollection(MetadataModel): file_objs: list[FileObj] = [] file_tags: list[FileTag] = [] + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + fedora_json = { + "type": "Research Planning Collection", + "title": self.title, + "description": self.description, + } + fedora_json["contributor"] = [ + f"{author.lname}, {author.fname}" for author in self.data_collectors + ] + return fedora_json + class GeoscienceCollection(MetadataModel): """Model for geoscience collections.""" @@ -181,7 +295,7 @@ class GeoscienceCollection(MetadataModel): BeforeValidator(lambda v: handle_dropdown_values(v, FR_OBSERVATION_TYPES)), ] = [] date_start: str - date_end: str + date_end: Optional[str] = None location: str = "" latitude: str = "" longitude: str = "" @@ -195,3 +309,26 @@ class GeoscienceCollection(MetadataModel): files: list[str] = [] file_objs: list[FileObj] = [] file_tags: list[FileTag] = [] + + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + fedora_json = { + "type": "Engineering Geosciences Collection", + "title": self.title, + "description": self.description, + } + + fedora_json["subject"] = [] + fedora_json["subject"] += [o.name for o in self.observation_types] + fedora_json["subject"] += [e.name for e in self.equipment] + + fedora_json["coverage"] = [] + if self.date_start: + fedora_json["coverage"] += self.date_start + if self.date_end: + fedora_json["coverage"] += self.date_end + + if self.location: + fedora_json["coverage"].append(self.location) + + return fedora_json diff --git a/designsafe/apps/api/projects_v2/schema_models/hybrid_sim.py 
b/designsafe/apps/api/projects_v2/schema_models/hybrid_sim.py index 99f1f024ed..f9b2683a09 100644 --- a/designsafe/apps/api/projects_v2/schema_models/hybrid_sim.py +++ b/designsafe/apps/api/projects_v2/schema_models/hybrid_sim.py @@ -1,4 +1,5 @@ """Pydantic schema models for Hybrid Simulation entities""" + from typing import Annotated, Optional from pydantic import BeforeValidator, Field, model_validator from designsafe.apps.api.projects_v2.schema_models._field_models import MetadataModel @@ -29,8 +30,8 @@ class HybridSimulation(MetadataModel): BeforeValidator(lambda v: handle_dropdown_value(v, HYBRID_SIM_TYPES)), ] simulation_type_other: Optional[str] = Field(exclude=True, default=None) - procedure_start: str = "" - procedure_end: str = "" + procedure_start: Optional[str] = None + procedure_end: Optional[str] = None referenced_data: list[ReferencedWork] = [] related_work: list[AssociatedProject] = [] authors: Annotated[list[ProjectUser], BeforeValidator(handle_legacy_authors)] = [] @@ -52,6 +53,34 @@ def handle_other(self): self.simulation_type.name = self.simulation_type_other return self + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + fedora_json = { + "type": self.simulation_type.name, + "title": self.title, + "description": self.description, + } + + fedora_json["creator"] = [ + f"{author.lname}, {author.fname}" for author in self.authors + ] + + fedora_json["identifier"] = self.dois + if self.facility: + fedora_json["contributor"] = self.facility.name + + for referenced_data in self.referenced_data: + reference_mapping = referenced_data.to_fedora_json() + for key in reference_mapping: + fedora_json[key] = fedora_json.get(key, []) + [reference_mapping[key]] + + for related_work in self.related_work: + related_mapping = related_work.to_fedora_json() + for key in related_mapping: + fedora_json[key] = fedora_json.get(key, []) + [related_mapping[key]] + + return fedora_json + class HybridSimGlobalModel(MetadataModel): 
"""Model for hybrid sim global models.""" @@ -67,6 +96,14 @@ class HybridSimGlobalModel(MetadataModel): tags: Optional[dict] = Field(default=None, exclude=True) + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return { + "type": "global model", + "title": self.title, + "description": self.description, + } + class HybridSimCoordinator(MetadataModel): """Model for coordinators.""" @@ -84,6 +121,14 @@ class HybridSimCoordinator(MetadataModel): tags: Optional[dict] = Field(default=None, exclude=True) + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return { + "type": "master simulation coordinator", + "title": self.title, + "description": self.description, + } + class HybridSimSimSubstructure(MetadataModel): """Model for simulation substructures.""" @@ -102,6 +147,14 @@ class HybridSimSimSubstructure(MetadataModel): tags: Optional[dict] = Field(default=None, exclude=True) + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return { + "type": "simulation substructure", + "title": self.title, + "description": self.description, + } + class HybridSimExpSubstructure(MetadataModel): """Model for experimental substructures.""" @@ -119,6 +172,14 @@ class HybridSimExpSubstructure(MetadataModel): tags: Optional[dict] = Field(default=None, exclude=True) + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return { + "type": "experimental substructure", + "title": self.title, + "description": self.description, + } + class HybridSimCoordinatorOutput(MetadataModel): """Model for coordinator output.""" @@ -136,6 +197,14 @@ class HybridSimCoordinatorOutput(MetadataModel): tags: Optional[dict] = Field(default=None, exclude=True) + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return { + "type": "coordinator output", + "title": self.title, + "description": self.description, + } + class 
HybridSimSimOutput(MetadataModel): """Model for coordinator output.""" @@ -154,6 +223,14 @@ class HybridSimSimOutput(MetadataModel): tags: Optional[dict] = Field(default=None, exclude=True) + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return { + "type": "simulation output", + "title": self.title, + "description": self.description, + } + class HybridSimExpOutput(MetadataModel): """Model for experimental substructure output.""" @@ -172,6 +249,14 @@ class HybridSimExpOutput(MetadataModel): tags: Optional[dict] = Field(default=None, exclude=True) + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return { + "type": "experimental output", + "title": self.title, + "description": self.description, + } + class HybridSimAnalysis(MetadataModel): """Model for hybrid sim analysis entities.""" @@ -190,6 +275,14 @@ class HybridSimAnalysis(MetadataModel): reference: Optional[str] = None referencedoi: Optional[str] = None + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return { + "type": "analysis", + "title": self.title, + "description": self.description, + } + class HybridSimReport(MetadataModel): """Model for hybrid sim reports.""" @@ -204,3 +297,7 @@ class HybridSimReport(MetadataModel): file_objs: list[FileObj] = [] tags: Optional[dict] = Field(default=None, exclude=True) + + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return {"type": "report", "title": self.title, "description": self.description} diff --git a/designsafe/apps/api/projects_v2/schema_models/simulation.py b/designsafe/apps/api/projects_v2/schema_models/simulation.py index a937afb3f0..acf04f1a9c 100644 --- a/designsafe/apps/api/projects_v2/schema_models/simulation.py +++ b/designsafe/apps/api/projects_v2/schema_models/simulation.py @@ -50,6 +50,34 @@ def handle_other(self): self.simulation_type.name = self.simulation_type_other return self + def 
to_fedora_json(self): + """Metadata representation for the Fedora repository""" + fedora_json = { + "type": self.simulation_type.name, + "title": self.title, + "description": self.description, + } + + fedora_json["creator"] = [ + f"{author.lname}, {author.fname}" for author in self.authors + ] + + fedora_json["identifier"] = self.dois + if self.facility: + fedora_json["contributor"] = self.facility.name + + for referenced_data in self.referenced_data: + reference_mapping = referenced_data.to_fedora_json() + for key in reference_mapping: + fedora_json[key] = fedora_json.get(key, []) + [reference_mapping[key]] + + for related_work in self.related_work: + related_mapping = related_work.to_fedora_json() + for key in related_mapping: + fedora_json[key] = fedora_json.get(key, []) + [related_mapping[key]] + + return fedora_json + class SimulationModel(MetadataModel): """Model for a simulation model.""" @@ -69,6 +97,14 @@ class SimulationModel(MetadataModel): file_tags: list[FileTag] = [] file_objs: list[FileObj] = [] + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return { + "type": "simulation model", + "title": self.title, + "description": self.description, + } + class SimulationInput(MetadataModel): """Model for simulation input.""" @@ -85,6 +121,14 @@ class SimulationInput(MetadataModel): file_tags: list[FileTag] = [] file_objs: list[FileObj] = [] + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return { + "type": "simulation input", + "title": self.title, + "description": self.description, + } + class SimulationOutput(MetadataModel): """Model for simulation output.""" @@ -102,6 +146,14 @@ class SimulationOutput(MetadataModel): file_tags: list[FileTag] = [] file_objs: list[FileObj] = [] + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return { + "type": "simulation output", + "title": self.title, + "description": self.description, + } + class 
SimulationAnalysis(MetadataModel): """Model for simulation analysis.""" @@ -121,6 +173,14 @@ class SimulationAnalysis(MetadataModel): reference: Optional[str] = None referencedoi: Optional[str] = None + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return { + "type": "analysis", + "title": self.title, + "description": self.description, + } + class SimulationReport(MetadataModel): """Model for simulation reports.""" @@ -135,3 +195,7 @@ class SimulationReport(MetadataModel): files: list[str] = [] file_tags: list[FileTag] = [] file_objs: list[FileObj] = [] + + def to_fedora_json(self): + """Metadata representation for the Fedora repository""" + return {"type": "report", "title": self.title, "description": self.description} diff --git a/designsafe/apps/api/projects_v2/tasks.py b/designsafe/apps/api/projects_v2/tasks.py new file mode 100644 index 0000000000..0a280b7390 --- /dev/null +++ b/designsafe/apps/api/projects_v2/tasks.py @@ -0,0 +1,65 @@ +"""Async tasks related to project creation/management.""" + +from celery import shared_task +from django.conf import settings +from django.contrib.auth import get_user_model +from django.core.mail import send_mail + +# pylint: disable=unused-import +from designsafe.apps.api.projects_v2.operations.project_system_operations import ( + add_user_to_project_async, + remove_user_from_project_async, +) +from designsafe.apps.api.projects_v2.models.project_metadata import ProjectMetadata +from designsafe.apps.api.projects_v2.operations.project_publish_operations import ( + publish_project_async, + amend_publication_async, +) +from designsafe.apps.api.projects_v2.operations.project_archive_operations import ( + archive_publication_async, +) + + +@shared_task(max_retries=3, default_retry_delay=60) +def alert_sensitive_data(project_id, username): + """ + contact project admins regarding publication of sensitive information + """ + project = ProjectMetadata.get_project_by_id(project_id) + admins = 
settings.PROJECT_ADMINS_EMAIL + user = get_user_model().objects.get(username=username) + + for admin in admins: + email_body = """ +

    Hello,

    +

    + The following Field Research project has been created with the intent of publishing sensitive information: +
    + {prjID} - {title} +

    +

    + Contact PI: +
    + {name} - {email} +

    +

    + Link to Project: +
    + {url}. +

    + This is a programmatically generated message. Do NOT reply to this message. + """.format( + name=user.get_full_name(), + email=user.email, + title=project.value["title"], + prjID=project_id, + url=f"https://designsafe-ci.org/data/browser/projects/{project_id}", + ) + + send_mail( + "DesignSafe PII Alert", + email_body, + settings.DEFAULT_FROM_EMAIL, + [admin], + html_message=email_body, + ) diff --git a/designsafe/apps/api/projects_v2/tests/schema_integration.py b/designsafe/apps/api/projects_v2/tests/schema_integration.py index 04dc4c0e1b..5f65fc7828 100644 --- a/designsafe/apps/api/projects_v2/tests/schema_integration.py +++ b/designsafe/apps/api/projects_v2/tests/schema_integration.py @@ -11,7 +11,7 @@ from designsafe.apps.api.projects_v2.migration_utils.graph_constructor import ( transform_pub_entities, ) -from designsafe.apps.api.agave import service_account +from designsafe.apps.api.agave import get_service_account_client_v2 as service_account from designsafe.apps.api.publications.operations import listing as list_pubs from designsafe.apps.api.projects_v2.models.project_metadata import ProjectMetadata from designsafe.apps.api.projects_v2.operations.project_publish_operations import ( diff --git a/designsafe/apps/api/projects_v2/urls.py b/designsafe/apps/api/projects_v2/urls.py index 7d398a6328..9a62d775a0 100644 --- a/designsafe/apps/api/projects_v2/urls.py +++ b/designsafe/apps/api/projects_v2/urls.py @@ -22,6 +22,7 @@ path("/preview/", ProjectPreviewView.as_view()), # path("/associations", ProjectsView.as_view), path("entities//", ProjectEntityView.as_view()), + path("/entities/create/", ProjectEntityView.as_view()), path("/entities/ordering/", ProjectEntityOrderView.as_view()), path("/entities/validate/", ProjectEntityValidateView.as_view()), path( diff --git a/designsafe/apps/api/projects_v2/views.py b/designsafe/apps/api/projects_v2/views.py index fa41c2b5aa..539ad4183c 100644 --- a/designsafe/apps/api/projects_v2/views.py +++ 
b/designsafe/apps/api/projects_v2/views.py @@ -4,27 +4,48 @@ import json import networkx as nx from django.http import HttpRequest, JsonResponse +from django.utils.decorators import method_decorator +from django.views.decorators.csrf import csrf_exempt from django.db import models from designsafe.apps.api.views import BaseApiView, ApiException from designsafe.apps.api.projects_v2.models.project_metadata import ProjectMetadata +from designsafe.apps.api.projects_v2.schema_models.base import BaseProject +from designsafe.apps.api.projects_v2.tasks import alert_sensitive_data +from designsafe.apps.api.projects_v2.migration_utils.graph_constructor import ( + ALLOWED_RELATIONS, +) +from designsafe.apps.api.projects_v2 import constants from designsafe.apps.api.projects_v2.operations.graph_operations import ( reorder_project_nodes, add_node_to_project, remove_nodes_from_project, + initialize_project_graph, + remove_nodes_for_entity, ) from designsafe.apps.api.projects_v2.operations.project_meta_operations import ( patch_metadata, add_file_associations, set_file_associations, + create_entity_metadata, + delete_entity, remove_file_associations, set_file_tags, change_project_type, + create_project_metdata, + get_changed_users, ) from designsafe.apps.api.projects_v2.operations.project_publish_operations import ( add_values_to_tree, validate_entity_selection, ) +from designsafe.apps.api.projects_v2.operations.project_system_operations import ( + increment_workspace_count, + setup_project_file_system, + add_user_to_project_async, + remove_user_from_project_async, +) from designsafe.apps.api.projects_v2.schema_models.base import FileObj +from designsafe.apps.api.decorators import tapis_jwt_login logger = logging.getLogger(__name__) @@ -44,6 +65,7 @@ def get_search_filter(query_string): class ProjectsView(BaseApiView): """View for listing and creating projects""" + @method_decorator(tapis_jwt_login) def get(self, request: HttpRequest): """Return the list of projects for a given 
user.""" offset = int(request.GET.get("offset", 0)) @@ -71,11 +93,33 @@ def get(self, request: HttpRequest): def post(self, request: HttpRequest): """Create a new project.""" - - + user = request.user + if not request.user.is_authenticated: + raise ApiException("Unauthenticated user", status=401) + req_body = json.loads(request.body) + metadata_value = req_body.get("value", {}) + # Projects are initialized as type None + + # increment project count + prj_number = increment_workspace_count() + # create metadata and graph + metadata_value["projectType"] = "None" + metadata_value["projectId"] = f"PRJ-{prj_number}" + project_meta = create_project_metdata(metadata_value) + initialize_project_graph(project_meta.project_id) + project_users = [user.username for user in project_meta.users.all()] + # create project system + setup_project_file_system(project_uuid=project_meta.uuid, users=project_users) + # add users to system + + return JsonResponse({"projectId": project_meta.project_id}) + + +@method_decorator(csrf_exempt, name="dispatch") class ProjectInstanceView(BaseApiView): """View for listing/updating project entities.""" + @method_decorator(tapis_jwt_login) def get(self, request: HttpRequest, project_id: str): """Return all project metadata for a project ID""" user = request.user @@ -109,7 +153,7 @@ def put(self, request: HttpRequest, project_id: str): raise ApiException("Unauthenticated user", status=401) try: - user.projects.get( + project: ProjectMetadata = user.projects.get( models.Q(uuid=project_id) | models.Q(value__projectId=project_id) ) except ProjectMetadata.DoesNotExist as exc: @@ -118,22 +162,28 @@ def put(self, request: HttpRequest, project_id: str): ) from exc # Get the new value from the request data - new_value = request.data.get("new_value") + req_body = json.loads(request.body) + new_value = req_body.get("value", {}) + sensitive_data_option = req_body.get("sensitiveData", False) + if sensitive_data_option: + logger.debug("PROJECT %s INDICATES 
SENSITIVE DATA", project_id) + alert_sensitive_data.apply_async([project_id, request.user.username]) # Call the change_project_type function to update the project type updated_project = change_project_type(project_id, new_value) - entities = ProjectMetadata.objects.filter(base_project=updated_project) - return JsonResponse( - { - "updatedProject": updated_project.to_dict(), - "entities": [e.to_dict() for e in entities], - "tree": nx.tree_data( - nx.node_link_graph(updated_project.project_graph.value), "NODE_ROOT" - ), - } + users_to_add, users_to_remove = get_changed_users( + BaseProject.model_validate(project.value), + BaseProject.model_validate(updated_project.value), ) + for user_to_add in users_to_add: + add_user_to_project_async.apply_async([project.uuid, user_to_add]) + for user_to_remove in users_to_remove: + remove_user_from_project_async.apply_async([project_id, user_to_remove]) + + return JsonResponse({"result": "OK"}) + @method_decorator(tapis_jwt_login) def patch(self, request: HttpRequest, project_id: str): """Update a project's root metadata""" user = request.user @@ -150,7 +200,18 @@ def patch(self, request: HttpRequest, project_id: str): ) from exc request_body = json.loads(request.body).get("patchMetadata", {}) - patch_metadata(project.uuid, request_body) + prev_metadata = BaseProject.model_validate(project.value) + updated_project = patch_metadata(project.uuid, request_body) + updated_metadata = BaseProject.model_validate(updated_project.value) + + users_to_add, users_to_remove = get_changed_users( + prev_metadata, updated_metadata + ) + for user_to_add in users_to_add: + add_user_to_project_async.apply_async([project.uuid, user_to_add]) + for user_to_remove in users_to_remove: + remove_user_from_project_async.apply_async([project_id, user_to_remove]) + return JsonResponse({"result": "OK"}) @@ -174,6 +235,47 @@ def patch(self, request: HttpRequest, entity_uuid: str): patch_metadata(entity_uuid, request_body) return JsonResponse({"result": "OK"}) + 
def delete(self, request: HttpRequest, entity_uuid: str): + """Delete an entity's metadata and remove the entity from the graph""" + user = request.user + if not request.user.is_authenticated: + raise ApiException("Unauthenticated user", status=401) + + entity_meta = ProjectMetadata.objects.get(uuid=entity_uuid) + if user not in entity_meta.base_project.users.all(): + raise ApiException( + "User does not have access to the requested project", status=403 + ) + + remove_nodes_for_entity(entity_meta.project_id, entity_uuid) + delete_entity(entity_uuid) + return JsonResponse({"result": "OK"}) + + def post(self, request: HttpRequest, project_id: str): + """Add a new entity to a project""" + + user = request.user + if not request.user.is_authenticated: + raise ApiException("Unauthenticated user", status=401) + + try: + project: ProjectMetadata = user.projects.get( + models.Q(uuid=project_id) | models.Q(value__projectId=project_id) + ) + except ProjectMetadata.DoesNotExist as exc: + raise ApiException( + "User does not have access to the requested project", status=403 + ) from exc + + req_body = json.loads(request.body) + value = req_body.get("value", {}) + name = req_body.get("name", "") + + new_meta = create_entity_metadata(project.project_id, name, value) + if name in ALLOWED_RELATIONS[constants.PROJECT]: + add_node_to_project(project_id, "NODE_ROOT", new_meta.uuid, name) + return JsonResponse({"result": "OK"}) + class ProjectPreviewView(BaseApiView): """View for generating the Publication Preview""" diff --git a/designsafe/apps/api/projects_v2/views_unit_test.py b/designsafe/apps/api/projects_v2/views_unit_test.py new file mode 100644 index 0000000000..a6c5723458 --- /dev/null +++ b/designsafe/apps/api/projects_v2/views_unit_test.py @@ -0,0 +1,65 @@ +import pytest +import json + + +@pytest.mark.django_db +def test_get_project_instance_unauthed(client, project_with_associations): + _, _, project_uuid = project_with_associations + response = 
client.get(f"/api/projects/v2/{project_uuid}/") + assert response.status_code == 401 + + +@pytest.mark.django_db +def test_get_project_instance(client, authenticated_user, project_with_associations): + _, _, project_uuid = project_with_associations + response = client.get(f"/api/projects/v2/{project_uuid}/") + assert response.status_code == 200 + + +@pytest.mark.django_db +def test_get_project_instance_with_jwt( + client, regular_user_using_jwt, project_with_associations +): + _, _, project_uuid = project_with_associations + response = client.get(f"/api/projects/v2/{project_uuid}/") + assert response.status_code == 200 + + +@pytest.mark.django_db +def test_patch_project_instance_unauthed(client, project_with_associations): + _, _, project_uuid = project_with_associations + map_entry = { + "name": "Name", + "uuid": "1234", + "path": "/something.hazmapper", + "deployment": "test", + } + patch_data = {"patchMetadata": {"hazmapperMaps": [map_entry]}} + response = client.patch( + f"/api/projects/v2/{project_uuid}/", + data=json.dumps(patch_data), + content_type="application/json", + ) + assert response.status_code == 401 + + +@pytest.mark.django_db +def test_patch_project_instance_with_jwt( + client, regular_user_using_jwt, project_with_associations +): + project, _, project_uuid = project_with_associations + map_entry = { + "name": "Name", + "uuid": "1234", + "path": "/something.hazmapper", + "deployment": "test", + } + patch_data = {"patchMetadata": {"hazmapperMaps": [map_entry]}} + response = client.patch( + f"/api/projects/v2/{project_uuid}/", + data=json.dumps(patch_data), + content_type="application/json", + ) + assert response.status_code == 200 + project.refresh_from_db() + assert [map_entry] == project.value["hazmapperMaps"] diff --git a/designsafe/apps/api/publications/operations.py b/designsafe/apps/api/publications/operations.py index 35a734a42c..5bff9556c2 100644 --- a/designsafe/apps/api/publications/operations.py +++ 
b/designsafe/apps/api/publications/operations.py @@ -167,7 +167,7 @@ def metrics(project_id, *args, **kwargs): return metrics_meta -def neeslisting(offset=0, limit=100, limit_fields=True, *args): +def neeslisting(offset=0, limit=100, limit_fields=True, q='', *args): client = new_es_client() pub_query = IndexedPublicationLegacy.search(using=client) pub_query = pub_query.extra(from_=offset, size=limit) @@ -178,8 +178,9 @@ def neeslisting(offset=0, limit=100, limit_fields=True, *args): ) res = pub_query.execute() hits = list(map(lambda h: h.to_dict(), res.hits)) - - return {'listing': hits} + if q: + return neessearch(offset=offset, limit=limit, query_string=q) + return {'listing': hits, 'total': res.hits.total.value} def neessearch(offset=0, limit=100, query_string='', limit_fields=True, *args): @@ -203,7 +204,7 @@ def neessearch(offset=0, limit=100, query_string='', limit_fields=True, *args): ) res = pub_query.execute() hits = list(map(lambda h: h.to_dict(), res.hits)) - return {'listing': hits} + return {'listing': hits, 'total': res.hits.total.value} def description(project_id, revision=None, *args): diff --git a/designsafe/apps/api/publications/urls.py b/designsafe/apps/api/publications/urls.py index c2ac84aacd..f5c64b7ad5 100644 --- a/designsafe/apps/api/publications/urls.py +++ b/designsafe/apps/api/publications/urls.py @@ -1,6 +1,7 @@ from django.urls import re_path as url -from designsafe.apps.api.publications.views import PublicationListingView, PublicationDetailView, PublicationDataCiteView +from designsafe.apps.api.publications.views import PublicationListingView, PublicationDetailView, PublicationDataCiteView, PublicationDataCiteEventsView from django.http import JsonResponse +from django.views.decorators.cache import cache_page urlpatterns = [ @@ -8,7 +9,8 @@ # Browsing: # # GET /listing//// - url(r'^data-cite/(?P[ \S]*)$', PublicationDataCiteView.as_view(), name='publication_datacite'), + url(r'^data-cite/events$', 
cache_page(60*15)(PublicationDataCiteEventsView.as_view()), name='publication_datacite_usage'), + url(r'^data-cite/(?P\S+)$', cache_page(60*15)(PublicationDataCiteView.as_view()), name='publication_datacite'), url(r'^(?P[\w.-]+)/$', PublicationListingView.as_view(), name='publication_listing'), url(r'^(?P[\w.-]+)/(?P[A-Z\-]+-[0-9]+)(v(?P[0-9]+))?/$', PublicationDetailView.as_view(), name='publication_detail'), url(r'^(?P[\w.-]+)/(?P[\w.-]+)/$', PublicationDetailView.as_view(), name='legacy-publication_detail') diff --git a/designsafe/apps/api/publications/views.py b/designsafe/apps/api/publications/views.py index 931c5ebcea..0207358f3b 100644 --- a/designsafe/apps/api/publications/views.py +++ b/designsafe/apps/api/publications/views.py @@ -5,9 +5,11 @@ from requests.exceptions import HTTPError from designsafe.apps.api.publications import operations from designsafe.apps.projects.managers import datacite as DataciteManager +from django.utils.decorators import method_decorator import json import logging -# Create your views here. +import requests + logger = logging.getLogger(__name__) @@ -42,8 +44,31 @@ def get(self, request, operation, project_id, revision=None): """ class PublicationDataCiteView(BaseApiView): def get(self, request, doi): + url = f'https://api.datacite.org/dois/{doi}' + try: - response = DataciteManager.get_doi(doi) - return JsonResponse(response) + response = requests.get(url) + response.raise_for_status() # Raises an HTTPError for bad responses (4xx and 5xx) + return JsonResponse(response.json()) # Assuming the response is JSON except HTTPError as e: return JsonResponse({'message': str(e)}, status=e.response.status_code) + except Exception as e: + return JsonResponse({'message': str(e)}, status=500) + +""" +View for getting DataCite metrics details from publications. 
+""" +class PublicationDataCiteEventsView(BaseApiView): + def get(self, request): + doi = request.GET.get('doi', '') + source_id = request.GET.get('source-id', 'datacite-usage') + + url = f'https://api.datacite.org/events?source-id={source_id}&doi={doi}' + + try: + response = requests.get(url) + response.raise_for_status() + events = response.json() + return JsonResponse(events) + except Exception as e: + return JsonResponse({'error': str(e)}, status=500) diff --git a/designsafe/apps/api/publications_v2/elasticsearch.py b/designsafe/apps/api/publications_v2/elasticsearch.py new file mode 100644 index 0000000000..ca0fa34351 --- /dev/null +++ b/designsafe/apps/api/publications_v2/elasticsearch.py @@ -0,0 +1,29 @@ +"""Elasticsearch model for published works""" + +from elasticsearch_dsl import Document +from elasticsearch.exceptions import NotFoundError +from django.conf import settings +from designsafe.apps.api.publications_v2.models import Publication + + +class IndexedPublication(Document): + """Elasticsearch model for published works""" + + # pylint: disable=too-few-public-methods + class Index: + """Index meta settings""" + + name = settings.ES_INDICES["publications_v2"]["alias"] + + +def index_publication(project_id): + """Util to index a publication by its project ID""" + pub = Publication.objects.get(project_id=project_id) + try: + pub_es = IndexedPublication.get(project_id) + pub_es.update(**pub.tree) + + except NotFoundError: + pub_es = IndexedPublication(**pub.tree) + pub_es.meta["id"] = project_id + pub_es.save() diff --git a/designsafe/apps/api/publications_v2/operations/fedora_graph_operations.py b/designsafe/apps/api/publications_v2/operations/fedora_graph_operations.py new file mode 100644 index 0000000000..7b24893eb9 --- /dev/null +++ b/designsafe/apps/api/publications_v2/operations/fedora_graph_operations.py @@ -0,0 +1,299 @@ +"""Utils for constructing Fedora metadata definitions from publications""" + +import urllib +import networkx as nx + +from 
designsafe.apps.api.publications_v2.models import Publication +from designsafe.apps.api.projects_v2.schema_models import SCHEMA_MAPPING, PATH_SLUGS +from designsafe.apps.api.projects_v2.schema_models.base import BaseProject +from designsafe.apps.api.projects_v2 import constants + +prov_predecessor_mapping = { + # Experimental + constants.EXPERIMENT: {"wasStartedBy": [constants.PROJECT]}, + constants.EXPERIMENT_MODEL_CONFIG: {"wasGeneratedBy": [constants.EXPERIMENT]}, + constants.EXPERIMENT_SENSOR: { + "wasGeneratedBy": [constants.EXPERIMENT], + "wasDerivedFrom": [constants.EXPERIMENT_MODEL_CONFIG], + }, + constants.EXPERIMENT_EVENT: { + "wasGeneratedBy": [constants.EXPERIMENT], + "wasDerivedFrom": [constants.EXPERIMENT_MODEL_CONFIG], + "wasInformedBy": [constants.EXPERIMENT_SENSOR], + }, + constants.EXPERIMENT_REPORT: { + "wasGeneratedBy": [constants.EXPERIMENT], + }, + constants.EXPERIMENT_ANALYSIS: { + "wasGeneratedBy": [constants.EXPERIMENT], + }, + # Hybrid Sim + constants.HYBRID_SIM: {"wasGeneratedBy": [constants.PROJECT]}, + constants.HYBRID_SIM_GLOBAL_MODEL: {"wasGeneratedBy": [constants.HYBRID_SIM]}, + constants.HYBRID_SIM_COORDINATOR: { + "wasGeneratedBy": [constants.HYBRID_SIM], + "wasInfluencedBy": [constants.HYBRID_SIM_GLOBAL_MODEL], + }, + constants.HYBRID_SIM_COORDINATOR_OUTPUT: { + "wasGeneratedBy": [constants.HYBRID_SIM], + "wasInfluencedBy": [constants.HYBRID_SIM_GLOBAL_MODEL], + "wasDerivedFrom": [constants.HYBRID_SIM_COORDINATOR], + }, + constants.HYBRID_SIM_SIM_SUBSTRUCTURE: { + "wasInfluencedBy": [ + constants.HYBRID_SIM_GLOBAL_MODEL, + constants.HYBRID_SIM_COORDINATOR, + ], + "wasGeneratedBy": [constants.HYBRID_SIM], + }, + constants.HYBRID_SIM_SIM_OUTPUT: { + "wasDerivedFrom": [constants.HYBRID_SIM_SIM_SUBSTRUCTURE] + }, + constants.HYBRID_SIM_EXP_SUBSTRUCTURE: { + "wasInfluencedBy": [ + constants.HYBRID_SIM_GLOBAL_MODEL, + constants.HYBRID_SIM_COORDINATOR, + ], + "wasGeneratedBy": [constants.HYBRID_SIM], + }, + 
constants.HYBRID_SIM_EXP_OUTPUT: { + "wasDerivedFrom": [constants.HYBRID_SIM_EXP_SUBSTRUCTURE] + }, + constants.HYBRID_SIM_ANALYSIS: {"wasGeneratedBy": [constants.HYBRID_SIM]}, + constants.HYBRID_SIM_REPORT: {"wasGeneratedBy": [constants.HYBRID_SIM]}, + # Simulation + constants.SIMULATION: {"wasGeneratedBy": [constants.PROJECT]}, + constants.SIMULATION_MODEL: {"wasGeneratedBy": [constants.SIMULATION]}, + constants.SIMULATION_INPUT: { + "wasGeneratedBy": [constants.SIMULATION], + "wasDerivedFrom": [constants.SIMULATION_MODEL], + }, + constants.SIMULATION_OUTPUT: { + "wasGeneratedBy": [constants.SIMULATION], + "wasDerivedFrom": [constants.SIMULATION_MODEL, constants.SIMULATION_INPUT], + }, + constants.SIMULATION_ANALYSIS: {"wasGeneratedBy": [constants.SIMULATION]}, + constants.SIMULATION_REPORT: {"wasGeneratedBy": [constants.SIMULATION]}, + # Field Recon + constants.FIELD_RECON_MISSION: {"wasStartedBy": [constants.PROJECT]}, + constants.FIELD_RECON_REPORT: {"wasStartedBy": [constants.PROJECT]}, + constants.FIELD_RECON_PLANNING: {"wasGeneratedBy": [constants.FIELD_RECON_MISSION]}, + constants.FIELD_RECON_SOCIAL_SCIENCE: { + "wasGeneratedBy": [constants.FIELD_RECON_MISSION] + }, + constants.FIELD_RECON_GEOSCIENCE: { + "wasGeneratedBy": [constants.FIELD_RECON_MISSION] + }, +} + +prov_successor_mapping = { + constants.EXPERIMENT: { + "generated": [ + constants.EXPERIMENT_MODEL_CONFIG, + constants.EXPERIMENT_ANALYSIS, + constants.EXPERIMENT_REPORT, + constants.EXPERIMENT_SENSOR, + constants.EXPERIMENT_EVENT, + ] + }, + constants.HYBRID_SIM: { + "generated": [ + constants.HYBRID_SIM_ANALYSIS, + constants.HYBRID_SIM_REPORT, + constants.HYBRID_SIM_COORDINATOR, + constants.HYBRID_SIM_GLOBAL_MODEL, + constants.HYBRID_SIM_COORDINATOR_OUTPUT, + constants.HYBRID_SIM_SIM_SUBSTRUCTURE, + constants.HYBRID_SIM_SIM_OUTPUT, + constants.HYBRID_SIM_EXP_SUBSTRUCTURE, + constants.HYBRID_SIM_EXP_OUTPUT, + ] + }, + constants.SIMULATION: { + "generated": [ + constants.SIMULATION_ANALYSIS, + 
constants.SIMULATION_REPORT, + constants.SIMULATION_MODEL, + constants.SIMULATION_INPUT, + constants.SIMULATION_OUTPUT, + ] + }, + constants.FIELD_RECON_MISSION: { + "generated": [ + constants.FIELD_RECON_PLANNING, + constants.FIELD_RECON_SOCIAL_SCIENCE, + constants.FIELD_RECON_GEOSCIENCE, + ] + }, +} + + +def get_node_url_path( + pub_tree: nx.DiGraph, node_id: str, project_id: str, version: int = 1 +): + """Get the path to an entity in Fedora relative to the publication container root.""" + url_path = project_id + if version > 1: + url_path = f"{project_id}v{version}" + + node_path = nx.shortest_path(pub_tree, source="NODE_ROOT", target=node_id) + for path_id in node_path[1:]: + entity_title = pub_tree.nodes[path_id]["value"]["title"] + url_path += f"/{urllib.parse.quote(entity_title)}" + + return url_path + + +def get_predecessor_prov_tags(pub_tree: nx.DiGraph, node_id: str): + """ + Get PROV metadata relating to a node's predecessors. Example return value: + {'wasGeneratedBy': ['Experiment: Particle Image Data (10.17603/ds2-j0b5-5y02)'], + 'wasDerivedFrom': ['Model-config: Culebra, Humacao and Yabucoa'], + 'wasInformedBy': ['Sensor: Particle Image Velocimetry System']} + """ + prov_predecessor_json = {} + node_name = pub_tree.nodes[node_id]["name"] + + node_path = nx.shortest_path(pub_tree, source="NODE_ROOT", target=node_id) + prov_map = prov_predecessor_mapping.get(node_name, {}) + for predecessor_node_id in node_path: + for prov_relation in prov_map: + if pub_tree.nodes[predecessor_node_id]["name"] in prov_map[prov_relation]: + predecessor_node_data = pub_tree.nodes[predecessor_node_id] + predecessor_name = f"{PATH_SLUGS[predecessor_node_data['name']]}: {predecessor_node_data['value']['title']}" + if predecessor_node_data["value"].get("dois"): + predecessor_name += ( + f" ({predecessor_node_data['value']['dois'][0]})" + ) + + prov_predecessor_json[prov_relation] = prov_predecessor_json.get( + prov_relation, [] + ) + [predecessor_name] + + return 
prov_predecessor_json + + +def get_successor_prov_tags(pub_tree: nx.DiGraph, node_id: str): + """ + Get PROV metadata related to a node's successors. Example return value: + {'generated': ['Report: Data Dictionary', + 'Model-config: Culebra, Humacao and Yabucoa', + 'Sensor: Particle Image Velocimetry System', + 'Event: Approach Flow', + 'Event: Culebra Model']} + """ + prov_successor_json = {} + node_name = pub_tree.nodes[node_id]["name"] + prov_map = prov_successor_mapping.get(node_name, {}) + + successors = nx.dfs_preorder_nodes(pub_tree, node_id) + for successor_node_id in successors: + for prov_relation in prov_map: + if pub_tree.nodes[successor_node_id]["name"] in prov_map[prov_relation]: + successor_node_data = pub_tree.nodes[successor_node_id] + successor_name = f"{PATH_SLUGS[successor_node_data['name']]}: {successor_node_data['value']['title']}" + if successor_node_data["value"].get("dois"): + successor_name += f" ({successor_node_data['value']['dois'][0]})" + + prov_successor_json[prov_relation] = prov_successor_json.get( + prov_relation, [] + ) + [successor_name] + + return prov_successor_json + + +def get_project_root_mapping( + project_id, version, pub_tree: nx.DiGraph, node_data: dict +): + """Get Fedora mapping for the project root.""" + + fedora_json = BaseProject.model_validate(node_data["value"]).to_fedora_json() + + publication_date = node_data.get("publicationDate", None) + if publication_date: + fedora_json["available"] = publication_date + + project_mapping = { + "uuid": node_data["uuid"], + "container_path": get_node_url_path(pub_tree, "NODE_ROOT", project_id, version), + "fedora_mapping": fedora_json, + "fileObjs": [], + "fileTags": node_data["value"].get("fileTags", []), + } + return project_mapping + + +def get_fedora_json(project_id: str, version: int = 1): + """ + Returns Fedora mappings and path/file tag information for each entity in a pub. 
+ """ + pub = Publication.objects.get(project_id=project_id) + + pub_tree: nx.DiGraph = nx.node_link_graph(pub.tree) + + pub_root = pub_tree.nodes["NODE_ROOT"] + + fedora_json_mappings = [] + + # Handle type Other + if not pub_root.get("name"): + base_project_node_data = next( + ( + pub_tree.nodes[e] + for e in pub_tree.successors("NODE_ROOT") + if pub_tree.nodes[e]["version"] == version + ), + None, + ) + project_mapping = get_project_root_mapping( + project_id, version, pub_tree, base_project_node_data + ) + + fedora_json_mappings.append(project_mapping) + return fedora_json_mappings + + # Handle non-Other pubs with entity trees + + base_project_mapping = get_project_root_mapping( + project_id, version, pub_tree, pub_tree.nodes["NODE_ROOT"] + ) + + fedora_json_mappings.append(base_project_mapping) + + published_entities_with_version = [ + e + for e in pub_tree.successors("NODE_ROOT") + if pub_tree.nodes[e]["version"] == version + ] + + for entity_node in published_entities_with_version: + dfs_nodes = nx.dfs_preorder_nodes(pub_tree, entity_node) + for dfs_node_id in dfs_nodes: + entity_meta = pub_tree.nodes[dfs_node_id] + + fedora_mapping = ( + SCHEMA_MAPPING[entity_meta["name"]] + .model_validate(entity_meta["value"]) + .to_fedora_json() + ) + fedora_mapping = { + **fedora_mapping, + **get_predecessor_prov_tags(pub_tree, dfs_node_id), + **get_successor_prov_tags(pub_tree, dfs_node_id), + } + + if not fedora_mapping.get("identifier"): + fedora_mapping["identifier"] = entity_meta["uuid"] + + fedora_json_mappings.append( + { + "uuid": entity_meta["uuid"], + "container_path": get_node_url_path( + pub_tree, dfs_node_id, project_id, version + ), + "fedora_mapping": fedora_mapping, + "fileObjs": entity_meta["value"].get("fileObjs", []), + "fileTags": entity_meta["value"].get("fileTags", []), + } + ) + return fedora_json_mappings diff --git a/designsafe/apps/api/publications_v2/urls.py b/designsafe/apps/api/publications_v2/urls.py index 7684703605..c13b361e6e 100644 --- 
a/designsafe/apps/api/publications_v2/urls.py +++ b/designsafe/apps/api/publications_v2/urls.py @@ -4,11 +4,17 @@ from designsafe.apps.api.publications_v2.views import ( PublicationListingView, PublicationDetailView, + PublicationPublishView, + PublicationAmendView, + PublicationVersionView, ) urlpatterns = [ path("", PublicationListingView.as_view()), path("/", PublicationListingView.as_view()), + path("publish/", PublicationPublishView.as_view()), + path("amend/", PublicationAmendView.as_view()), + path("version/", PublicationVersionView.as_view()), re_path( r"^(?P[A-Z\-]+-[0-9]+)(v(?P[0-9]+))?/?$", PublicationDetailView.as_view(), diff --git a/designsafe/apps/api/publications_v2/views.py b/designsafe/apps/api/publications_v2/views.py index 96b4f5035a..2db2b4e2bb 100644 --- a/designsafe/apps/api/publications_v2/views.py +++ b/designsafe/apps/api/publications_v2/views.py @@ -1,14 +1,148 @@ """Views for published data""" import logging +import json import networkx as nx +from django.db import models from django.http import HttpRequest, JsonResponse -from designsafe.apps.api.views import BaseApiView +from designsafe.apps.api.views import BaseApiView, ApiException from designsafe.apps.api.publications_v2.models import Publication +from designsafe.apps.api.publications_v2.elasticsearch import IndexedPublication +from designsafe.apps.api.projects_v2.models.project_metadata import ProjectMetadata +from designsafe.apps.api.projects_v2.operations.project_publish_operations import ( + publish_project_async, + amend_publication_async, +) logger = logging.getLogger(__name__) +def handle_search(query_opts: dict, offset=0, limit=100): + from elasticsearch_dsl import Q + + logger.debug(offset) + + query = IndexedPublication.search() + + if project_type_query := query_opts["project-type"]: + query = query.filter("terms", **{"nodes.value.projectType": project_type_query}) + + if facility_query := query_opts["facility"]: + query = query.filter( + Q("term", 
**{"nodes.value.facility.id.keyword": facility_query}) + | Q("term", **{"nodes.value.facilities.id.keyword": facility_query}) + ) + if nh_type_query := query_opts["nh-type"]: + query = query.filter( + Q("term", **{"nodes.value.nhTypes.id.keyword": nh_type_query}) + | Q("term", **{"nodes.value.nhTypes": nh_type_query}) + ) + + if pub_year_query := query_opts["pub-year"]: + query = query.filter( + Q( + { + "range": { + "nodes.publicationDate": { + "gte": f"{pub_year_query}||/y", + "lte": f"{pub_year_query}||/y", + "format": "yyyy", + } + } + } + ) + ) + + if nh_year_query := query_opts["nh-year"]: + query = query.filter( + Q( + { + "range": { + "nodes.value.nhEventStart": { + "gte": f"{nh_year_query}||/y", + "lte": f"{nh_year_query}||/y", + "format": "yyyy", + } + } + } + ) + ) + + if experiment_type_query := query_opts["experiment-type"]: + query = query.filter( + Q( + "term", + **{"nodes.value.experimentType.id.keyword": experiment_type_query}, + ) + ) + + if sim_type_query := query_opts["sim-type"]: + query = query.filter( + Q( + "term", + **{"nodes.value.simulationType.id.keyword": sim_type_query}, + ) + ) + + if fr_type_query := query_opts["fr-type"]: + query = query.filter( + Q( + "term", + **{"nodes.value.frTypes.id.keyword": fr_type_query}, + ) + ) + + if hyb_sim_type_query := query_opts["hyb-sim-type"]: + query = query.filter( + Q( + "term", + **{"nodes.value.simulationType.id.keyword": hyb_sim_type_query}, + ) + ) + + if data_type_query := query_opts["data-type"]: + query = query.filter( + Q( + "term", + **{"nodes.value.dataType.id.keyword": data_type_query}, + ) + ) + + if search_string := query_opts["q"]: + qs_query = Q( + "query_string", + query=search_string, + default_operator="AND", + type="cross_fields", + fields=[ + "nodes.value.description", + "nodes.value.keywords", + "nodes.value.title", + "nodes.value.projectId", + "nodes.value.projectType", + "nodes.value.dataType", + "nodes.value.authors", + "nodes.value.authors.fname", + 
"nodes.value.authors.lname", + "nodes.value.authors.username", + "nodes.value.authors.inst", + ], + ) + term_query = Q({"term": {"nodes.value.projectId.keyword": search_string}}) + query = query.filter(qs_query | term_query) + + hits = ( + query.extra(from_=offset, size=limit) + .sort({"nodes.publicationDate": {"order": "desc"}}) + .source([""]) + .execute() + .hits + ) + returned_ids = [hit.meta.id for hit in hits] + + return returned_ids, hits.total.value + + class PublicationListingView(BaseApiView): """List all publications.""" @@ -17,15 +151,42 @@ def get(self, request: HttpRequest): offset = int(request.GET.get("offset", 0)) limit = int(request.GET.get("limit", 100)) - publications = Publication.objects.defer("tree").order_by("-created")[ - offset : offset + limit - ] - total = Publication.objects.count() + # Search/filter params + query_opts = { + "q": request.GET.get("q", None), + "project-type": request.GET.getlist("project-type", []), + "nh-type": request.GET.get("nh-type", None), + "pub-year": request.GET.get("pub-year", None), + "facility": request.GET.get("facility", None), + "experiment-type": request.GET.get("experiment-type", None), + "sim-type": request.GET.get("sim-type", None), + "fr-type": request.GET.get("fr-type", None), + "nh-year": request.GET.get("nh-year", None), + "hyb-sim-type": request.GET.get("hyb-sim-type", None), + "data-type": request.GET.get("data-type", None), + } + + has_query = any(query_opts.values()) + if has_query: + hits, total = handle_search(query_opts, offset, limit) + publications_query = ( + Publication.objects.filter(project_id__in=hits) + .defer("tree") + .order_by("-created") + ) + publications = publications_query + else: + publications_query = Publication.objects.defer("tree").order_by("-created") + total = publications_query.count() + publications = publications_query[offset : offset + limit] result = [ { "projectId": pub.value["projectId"], "title": pub.value["title"], "description": pub.value["description"], + 
"keywords": pub.value["keywords"], + "type": pub.value["projectType"], + "dataTypes": [t["name"] for t in pub.value.get("dataTypes", None)], "pi": next( (user for user in pub.value["users"] if user["role"] == "pi"), None ), @@ -41,8 +202,119 @@ class PublicationDetailView(BaseApiView): def get(self, request: HttpRequest, project_id, version=None): """Returns the tree view and base project metadata for a publication.""" - pub_meta = Publication.objects.get(project_id=project_id) + try: + pub_meta = Publication.objects.get(project_id=project_id) + except Publication.DoesNotExist as exc: + raise ApiException(status=404, message="Publication not found.") from exc + + pub_tree: nx.DiGraph = nx.node_link_graph(pub_meta.tree) + file_tags = [] + for file_tag_arr in [ + node.get("value", {}).get("fileTags", []) + for (_, node) in pub_tree.nodes.data() + ]: + for tag in file_tag_arr: + file_tags.append(tag) + + tree_json = nx.tree_data(pub_tree, "NODE_ROOT") + + return JsonResponse( + {"tree": tree_json, "fileTags": file_tags, "baseProject": pub_meta.value} + ) + + +class PublicationPublishView(BaseApiView): + """view for publishing a project.""" + + def post(self, request: HttpRequest): + """Create a new publication from a project.""" + user = request.user + request_body = json.loads(request.body) + logger.debug(request_body) + + project_id = request_body.get("projectId", None) + entities_to_publish = request_body.get("entityUuids", None) + + if (not project_id) or (not entities_to_publish): + raise ApiException("Missing project ID or entity list.", status=400) + + try: + user.projects.get( + models.Q(uuid=project_id) | models.Q(value__projectId=project_id) + ) + except ProjectMetadata.DoesNotExist as exc: + raise ApiException( + "User does not have access to the requested project", status=403 + ) from exc + + publish_project_async.apply_async([project_id, entities_to_publish]) + logger.debug(project_id) + logger.debug(entities_to_publish) + return JsonResponse({"result": 
"OK"}) + + +class PublicationVersionView(BaseApiView): + """view for versioning a project.""" + + def post(self, request: HttpRequest): + """Create a new publication from a project.""" + user = request.user + request_body = json.loads(request.body) + logger.debug(request_body) + + project_id = request_body.get("projectId", None) + entities_to_publish = request_body.get("entityUuids", None) + version_info = request_body.get("versionInfo", None) + + if (not project_id) or (not entities_to_publish): + raise ApiException("Missing project ID or entity list.", status=400) + + try: + user.projects.get( + models.Q(uuid=project_id) | models.Q(value__projectId=project_id) + ) + except ProjectMetadata.DoesNotExist as exc: + raise ApiException( + "User does not have access to the requested project", status=403 + ) from exc + + pub_root = Publication.objects.get(project_id=project_id) + pub_tree: nx.DiGraph = nx.node_link_graph(pub_root.tree) + latest_version = max( + pub_tree.nodes[node]["version"] for node in pub_tree.successors("NODE_ROOT") + ) + + publish_project_async.apply_async( + [project_id, entities_to_publish, latest_version + 1, version_info] + ) + logger.debug(project_id) + logger.debug(entities_to_publish) + return JsonResponse({"result": "OK"}) + + +class PublicationAmendView(BaseApiView): + """view for amemding a project.""" + + def post(self, request: HttpRequest): + """Create a new publication from a project.""" + user = request.user + request_body = json.loads(request.body) + logger.debug(request_body) + + project_id = request_body.get("projectId", None) + + if not project_id: + raise ApiException("Missing project ID.", status=400) - tree_json = nx.tree_data(nx.node_link_graph(pub_meta.tree), "NODE_ROOT") + try: + user.projects.get( + models.Q(uuid=project_id) | models.Q(value__projectId=project_id) + ) + except ProjectMetadata.DoesNotExist as exc: + raise ApiException( + "User does not have access to the requested project", status=403 + ) from exc - return 
JsonResponse({"tree": tree_json, "baseProject": pub_meta.value}) + amend_publication_async.apply_async([project_id]) + logger.debug(project_id) + return JsonResponse({"result": "OK"}) diff --git a/designsafe/apps/api/search/searchmanager/cms.py b/designsafe/apps/api/search/searchmanager/cms.py index 515c5b18ad..410c85745d 100644 --- a/designsafe/apps/api/search/searchmanager/cms.py +++ b/designsafe/apps/api/search/searchmanager/cms.py @@ -5,12 +5,11 @@ import logging -from future.utils import python_2_unicode_compatible from elasticsearch_dsl import Q, Index from django.conf import settings from designsafe.apps.api.search.searchmanager.base import BaseSearchManager -@python_2_unicode_compatible + class CMSSearchManager(BaseSearchManager): """ Search manager handling CMS data. """ diff --git a/designsafe/apps/api/systems/__init__.py b/designsafe/apps/api/systems/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/designsafe/apps/api/systems/ssh_keys_manager.py b/designsafe/apps/api/systems/ssh_keys_manager.py new file mode 100644 index 0000000000..a47483eeb3 --- /dev/null +++ b/designsafe/apps/api/systems/ssh_keys_manager.py @@ -0,0 +1,146 @@ +""" +.. :module:: apps.accounts.managers.ssh_keys + :synopsis: Manager handling anything pertaining to accounts +""" + +import logging +import paramiko + + +logger = logging.getLogger(__name__) + + +class KeyCannotBeAdded(Exception): + """Key Cannot Be Added Exception + + Exception raised when there is an error adding a public key + to `~/.ssh/authorized_keys` + """ + + def __init__(self, msg, output, error_output, *args, **kwargs): + super().__init__(*args, **kwargs) + self.msg = msg + self.output = output + self.error_output = error_output + + def __str__(self): + return f"{self.msg}: {self.output} \n {self.error_output}" + + +class KeysManager: + # pylint: disable=too-few-public-methods + """Keys Manager + + Class to wrap together any necessary action pertaining to ssh keys + and remote resources. 
+ """ + + def __init__(self, username, password, token): + # pylint: disable=super-init-not-called + """Init""" + self.username = username + self.password = password + self.token = token + + def _tacc_prompt_handler(self, title, instructions, prompt_list): + """TACC Prompt Handler + + This method handles SSH prompts from TACC resources + """ + answers = { + "password": self.password, + "tacc_token_code": self.token, + "tacc_token": self.token, + } + resp = [] + logger.debug("title: %s", title) + logger.debug("instructions: %s", instructions) + logger.debug("list: %s", prompt_list) + for prmpt in prompt_list: + prmpt_str = prmpt[0].lower().strip().replace(" ", "_").replace(":", "") + resp.append(answers[prmpt_str]) + return resp + + def get_transport(self, hostname, port): + """Gets authenticated transport""" + handler = self._tacc_prompt_handler + + trans = paramiko.Transport((hostname, port)) + # trans.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + # trans.set_hexdump(True) + trans.use_compression() + # trans.set_keepalive(5) + trans.connect() + trans.auth_interactive_dumb(str(self.username), handler) + return trans + + def _get_pub_key_comment(self, system_id): + """Get Pub Key Comment + + :param str system_id: Agave's system id + + :return str: comment + """ + comment = f"{self.username}@{system_id}" + return comment + + def _get_add_pub_key_command(self, system_id, public_key): + """Get Add Pub Key Command + + :param str system_id: Agave's system id + :param str publick_key: Public Key + + :return str: command + """ + comment = self._get_pub_key_comment(system_id) + string = " ".join([public_key, comment]) + command = ( + 'if [ ! 
-f "~/.ssh/authorized_keys" ]; then ' + "mkdir -p ~/.ssh/ && touch ~/.ssh/authorized_keys " + "&& chmod 0600 ~/.ssh/authorized_keys; fi && " + 'grep -q -F "{string}" ~/.ssh/authorized_keys || ' + 'echo "{string}" >> ~/.ssh/authorized_keys' + ).format(string=string) + return command + + def add_public_key( + self, system_id, hostname, public_key, port=22, transport=None + ): # pylint: disable=too-many-arguments, arguments-differ + """Adds public key to `authorized_keys` + + :param str sytem_id: System Id + :param str hostname: Hostname + :param str public_key: Public Key + :param int port: Port (optional) + :param transport: Transport object (optional) + """ + if transport is None: + trans = self.get_transport(hostname, port) + else: + trans = transport + channel = trans.open_session() + command = self._get_add_pub_key_command(system_id, public_key) + channel.exec_command(command) + # recv_exit_status blocks until there's an exit status from the + # executed command. + # So, after this we're safe to read stdout and stderr + status = channel.recv_exit_status() + output = channel.makefile() + stderr = channel.makefile_stderr() + output_lines = "" + for line in output.readlines(): + output_lines += line + "\n" + logger.debug(line) + + if status == -1: + logger.info("No response from the server") + elif status == 0: + logger.info(f"Public key added successfully to {hostname}") + elif status > 0: + error_lines = "" + for line in stderr.readlines(): + error_lines += line + "\n" + + raise KeyCannotBeAdded("Error adding public key", output_lines, error_lines) + trans.close() + return output_lines diff --git a/designsafe/apps/api/systems/urls.py b/designsafe/apps/api/systems/urls.py new file mode 100644 index 0000000000..cb1a20ab17 --- /dev/null +++ b/designsafe/apps/api/systems/urls.py @@ -0,0 +1,8 @@ +"""Publication API routes""" + +from django.urls import path +from .views import SystemKeysView + +urlpatterns = [ + path("keys/", SystemKeysView.as_view()), +] diff --git 
a/designsafe/apps/api/systems/utils.py b/designsafe/apps/api/systems/utils.py new file mode 100644 index 0000000000..984efeba77 --- /dev/null +++ b/designsafe/apps/api/systems/utils.py @@ -0,0 +1,82 @@ +""" +.. :module:: apps.accounts.managers.accounts + :synopsis: Manager handling anything pertaining to accounts +""" + +import logging +from paramiko.ssh_exception import ( + AuthenticationException, + ChannelException, + SSHException, +) +from .ssh_keys_manager import KeysManager, KeyCannotBeAdded + + +logger = logging.getLogger(__name__) + + +# pylint: disable=too-many-arguments +def add_pub_key_to_resource( + user, + password, + token, + system_id, + pub_key, + hostname=None, + port=22, +): + """Add Public Key to Remote Resource + + :param user: Django User object + :param str password: Username's pasword to remote resource + :param str token: TACC's token + :param str system_id: Tapis system's id + :param str hostname: Resource's hostname + :param int port: Port to use for ssh connection + + :raises: :class:`~portal.apps.accounts.managers.` + + """ + success = True + mgr = KeysManager(user, password, token) + message = "add_pub_key_to_resource" + + logger.info(f"Adding public key for user {user.username} on system {system_id}") + try: + if hostname is None: + sys = user.tapis_oauth.client.systems.getSystem(systemId=system_id) + hostname = sys.host + + transport = mgr.get_transport(hostname, port) + message = mgr.add_public_key( + system_id, hostname, pub_key, port=port, transport=transport + ) + status = 200 + + except Exception as base_exc: # pylint: disable=broad-exception-caught + # Catch all exceptions and set a status code for unknown exceptions + success = False + message = str(base_exc) + logger.exception( + message, + extra={"user": user.username}, + ) + + try: + # "Re-throw" exception to get known exception type status codes + raise base_exc + except AuthenticationException: + # Bad password/token + status = 403 # Forbidden + except KeyCannotBeAdded: + 
# May occur when system is down + message = "KeyCannotBeAdded" # KeyCannnotBeAdded exception does not contain a message? + status = 503 + except (ChannelException, SSHException) as exc: + # cannot ssh to system + message = str( + type(exc) + ) # paramiko exceptions do not contain a string message? + status = 500 # Bad gateway + + return success, message, status diff --git a/designsafe/apps/api/systems/views.py b/designsafe/apps/api/systems/views.py new file mode 100644 index 0000000000..12bbbe9397 --- /dev/null +++ b/designsafe/apps/api/systems/views.py @@ -0,0 +1,56 @@ +""" +.. :module:: designsafe.apps.api.systems.views + :synopsis: Systems views +""" + +import logging +import json +from django.http import JsonResponse +from designsafe.apps.api.views import AuthenticatedApiView +from designsafe.utils.system_access import create_system_credentials +from designsafe.utils.encryption import createKeyPair +from .utils import add_pub_key_to_resource + +logger = logging.getLogger(__name__) + + +class SystemKeysView(AuthenticatedApiView): + """Systems View + + Main view for anything involving a system test + """ + + def post(self, request): + """POST + + :param request: Django's request object + :param str system_id: System id + """ + body = json.loads(request.body) + system_id = body["systemId"] + + logger.info( + f"Resetting credentials for user {request.user.username} on system {system_id}" + ) + (priv_key_str, publ_key_str) = createKeyPair() + + _, result, http_status = add_pub_key_to_resource( + request.user, + password=body["password"], + token=body["token"], + system_id=system_id, + pub_key=publ_key_str, + hostname=body["hostname"], + ) + + create_system_credentials( + request.user.tapis_oauth.client, + request.user.username, + publ_key_str, + priv_key_str, + system_id, + ) + + return JsonResponse( + {"systemId": system_id, "message": result}, status=http_status + ) diff --git a/designsafe/apps/api/tests.py b/designsafe/apps/api/tests.py index 
01df8f33fb..1ad34c673e 100644 --- a/designsafe/apps/api/tests.py +++ b/designsafe/apps/api/tests.py @@ -4,7 +4,7 @@ from designsafe.apps.projects.models.agave.experimental import ExperimentalProject, ModelConfig, FileModel -from agavepy.agave import Agave +# from agavepy.agave import Agave import mock import json @@ -17,7 +17,7 @@ # Create your tests here. class ProjectDataModelsTestCase(TestCase): - fixtures = ['user-data.json', 'agave-oauth-token-data.json'] + fixtures = ['user-data.json', 'auth.json'] def setUp(self): user = get_user_model().objects.get(pk=2) diff --git a/designsafe/apps/api/urls.py b/designsafe/apps/api/urls.py index 7af0b1dc91..1c2149757a 100644 --- a/designsafe/apps/api/urls.py +++ b/designsafe/apps/api/urls.py @@ -10,6 +10,8 @@ path("publications/v2", include('designsafe.apps.api.publications_v2.urls')), path("publications/v2/", include('designsafe.apps.api.publications_v2.urls')), + path("systems/", include('designsafe.apps.api.systems.urls')), + url(r'^projects/', include(('designsafe.apps.api.projects.urls', 'designsafe.apps.api.projects'), namespace='ds_projects_api')), diff --git a/designsafe/apps/api/users/utils.py b/designsafe/apps/api/users/utils.py index 0923178d1c..bd51e92e52 100644 --- a/designsafe/apps/api/users/utils.py +++ b/designsafe/apps/api/users/utils.py @@ -1,33 +1,46 @@ +import logging +from pytas.http import TASClient from django.db.models import Q -import logging -import json logger = logging.getLogger(__name__) + +def get_user_data(username): + """Returns user contact information + + : returns: user_data + : rtype: dict + """ + tas_client = TASClient() + user_data = tas_client.get_user(username=username) + return user_data + + def list_to_model_queries(q_comps): query = None if len(q_comps) > 2: - query = Q(first_name__icontains = ' '.join(q_comps[:1])) - query |= Q(first_name__icontains = ' '.join(q_comps[:2])) - query |= Q(last_name__icontains = ' '.join(q_comps[1:])) - query |= Q(last_name__icontains = ' 
'.join(q_comps[2:])) + query = Q(first_name__icontains=" ".join(q_comps[:1])) + query |= Q(first_name__icontains=" ".join(q_comps[:2])) + query |= Q(last_name__icontains=" ".join(q_comps[1:])) + query |= Q(last_name__icontains=" ".join(q_comps[2:])) else: - query = Q(first_name__icontains = q_comps[0]) - query |= Q(last_name__icontains = q_comps[1]) + query = Q(first_name__icontains=q_comps[0]) + query |= Q(last_name__icontains=q_comps[1]) return query + def q_to_model_queries(q): if not q: return None query = None - if ' ' in q: + if " " in q: q_comps = q.split() query = list_to_model_queries(q_comps) else: - query = Q(email__icontains = q) - query |= Q(first_name__icontains = q) - query |= Q(last_name__icontains = q) + query = Q(email__icontains=q) + query |= Q(first_name__icontains=q) + query |= Q(last_name__icontains=q) return query diff --git a/designsafe/apps/api/users/views.py b/designsafe/apps/api/users/views.py index 792f66181b..0ffa7a833c 100644 --- a/designsafe/apps/api/users/views.py +++ b/designsafe/apps/api/users/views.py @@ -17,9 +17,9 @@ def check_public_availability(username): es_client = new_es_client() - query = Q({'multi_match': {'fields': ['project.value.teamMembers', - 'project.value.coPis', - 'project.value.pi'], + query = Q({'multi_match': {'fields': ['project.value.teamMembers', + 'project.value.coPis', + 'project.value.pi'], 'query': username}}) res = IndexedPublication.search(using=es_client).filter(query).execute() return res.hits.total.value > 0 @@ -50,14 +50,13 @@ def get(self, request): "last_name": u.last_name, "email": u.email, "oauth": { - "access_token": u.agave_oauth.access_token, - "expires_in": u.agave_oauth.expires_in, - "scope": u.agave_oauth.scope, - } + "expires_in": u.tapis_oauth.expires_in, + }, + "isStaff": u.is_staff, } return JsonResponse(out) - return HttpResponse('Unauthorized', status=401) + return JsonResponse({'message': 'Unauthorized'}, status=401) class SearchView(View): @@ -120,7 +119,7 @@ def get(self, 
request): return JsonResponse(resp, safe=False) else: return HttpResponseNotFound() - + class ProjectUserView(BaseApiView): """View for handling search for project users""" @@ -128,17 +127,17 @@ def get(self, request: HttpRequest): """retrieve a user by their exact TACC username.""" if not request.user.is_authenticated: raise ApiException(message="Authentication required", status=401) - + username_query = request.GET.get("q") user_match = get_user_model().objects.filter(username__iexact=username_query) user_resp = [{"fname": u.first_name, "lname": u.last_name, "inst": u.profile.institution, - "email": u.email, + "email": u.email, "username": u.username} for u in user_match] - + return JsonResponse({"result": user_resp}) - + class PublicView(View): diff --git a/designsafe/apps/api/views.py b/designsafe/apps/api/views.py index 1ee0eb4d0c..d41833c10f 100644 --- a/designsafe/apps/api/views.py +++ b/designsafe/apps/api/views.py @@ -1,57 +1,102 @@ -from django.http.response import HttpResponse, HttpResponseForbidden, JsonResponse +from django.views.decorators.csrf import csrf_exempt from django.views.generic import View -from requests.exceptions import ConnectionError, HTTPError +from django.http import JsonResponse, HttpResponse, Http404 +from django.core.exceptions import PermissionDenied +from django.utils.decorators import method_decorator +from requests.exceptions import HTTPError from .exceptions import ApiException import logging from logging import getLevelName import json +from designsafe.apps.api.decorators import tapis_jwt_login +from tapipy.errors import BaseTapyException logger = logging.getLogger(__name__) class BaseApiView(View): + """Base api view to centralize error logging.""" def dispatch(self, request, *args, **kwargs): """ Dispatch override to centralize error handling. If the error is instance of :class: `ApiException `. - An extra dictionary object will be used when calling `logger.error()`. 
- This allows to use any information in the `extra` dictionary object on the + An extra dictionary object will be used when calling `logger.error()`. + This allows to use any information in the `extra` dictionary object on the logger output. """ try: - return super(BaseApiView, self).dispatch(request, *args, **kwargs) + return super().dispatch(request, *args, **kwargs) + except (PermissionDenied, Http404) as e: + # log information but re-raise exception to let django handle response + logger.error(e, exc_info=True) + raise e except ApiException as e: - status = e.response.status_code + status = e.response.status_code or 400 message = e.response.reason extra = e.extra - logger.error('{}'.format(message), exc_info=True, extra=extra) - except (ConnectionError, HTTPError) as e: - if e.response: + if status != 404: + logger.error( + "%s: %s", message, e.response.text, exc_info=True, extra=extra + ) + else: + logger.info("Error %s", message, exc_info=True, extra=extra) + return JsonResponse({"message": message}, status=status) + except (ConnectionError, HTTPError, BaseTapyException) as e: + # status code and json content from ConnectionError/HTTPError exceptions + # are used in the returned response. Note: the handling of these two exceptions + # is significant as client-side code make use of these status codes (e.g. 
error + # responses from tapis are used to determine a tapis storage systems does not exist) + status = 500 + if e.response is not None: status = e.response.status_code - message = e.response.reason - if status not in [403, 404]: - logger.error('%s: %s', message, e.response.text, - exc_info=True, - extra={'username': request.user.username, - 'sessionId': request.session.session_key}) + try: + content = e.response.json() + message = content.get("message", "Unknown Error") + except ValueError: + message = "Unknown Error" + if status in [404, 403]: + logger.warning( + "%s: %s", + message, + e.response.text, + exc_info=True, + extra={ + "username": request.user.username, + "session_key": request.session.session_key, + }, + ) else: - logger.warning('%s: %s', message, e.response.text, - exc_info=True, - extra={'username': request.user.username, - 'sessionId': request.session.session_key}) + logger.error( + "%s: %s", + message, + e.response.text, + exc_info=True, + extra={ + "username": request.user.username, + "session_key": request.session.session_key, + }, + ) else: - logger.error('%s', e, exc_info=True) + logger.error( + e, + exc_info=True, + extra={ + "username": request.user.username, + "session_key": request.session.session_key, + }, + ) message = str(e) - status = 500 - - resp = {'message': message} - - return HttpResponse(json.dumps(resp), - status=status, content_type='application/json') + return JsonResponse({"message": message}, status=status) + except Exception as e: # pylint: disable=broad-except + logger.error(e, exc_info=True) + return JsonResponse({"message": "Something went wrong here..."}, status=500) class AuthenticatedApiView(BaseApiView): + """ + Extends BaseApiView to require authenticated requests + """ def dispatch(self, request, *args, **kwargs): """Returns 401 if user is not authenticated.""" @@ -61,6 +106,20 @@ def dispatch(self, request, *args, **kwargs): return super(AuthenticatedApiView, self).dispatch(request, *args, **kwargs) +class 
AuthenticatedAllowJwtApiView(AuthenticatedApiView): + """ + Extends AuthenticatedApiView to also allow JWT access in addition to django session cookie + """ + + @method_decorator(csrf_exempt, name="dispatch") + @method_decorator(tapis_jwt_login) + def dispatch(self, request, *args, **kwargs): + """Returns 401 if user is not authenticated like AuthenticatedApiView but allows JWT access.""" + return super(AuthenticatedAllowJwtApiView, self).dispatch( + request, *args, **kwargs + ) + + class LoggerApi(BaseApiView): """ Logger API for capturing logs from the front-end. @@ -80,15 +139,19 @@ def post(self, request): Returns: HTTP 202 """ - log_json = request.body.decode('utf-8') + log_json = request.body.decode("utf-8") log_data = json.loads(log_json) - level = getLevelName(log_data.pop('level', 'INFO')) - name = log_data.pop('name'); - - logger.log(level, '%s: %s', name, json.dumps(log_data), extra={ - 'user': request.user.username, - 'referer': request.META.get('HTTP_REFERER') - }) - return HttpResponse('OK', status=202) - - + level = getLevelName(log_data.pop("level", "INFO")) + name = log_data.pop("name") + + logger.log( + level, + "%s: %s", + name, + json.dumps(log_data), + extra={ + "user": request.user.username, + "referer": request.META.get("HTTP_REFERER"), + }, + ) + return HttpResponse("OK", status=202) diff --git a/designsafe/apps/applications/views.py b/designsafe/apps/applications/views.py index c82237bf6b..455e30d07d 100644 --- a/designsafe/apps/applications/views.py +++ b/designsafe/apps/applications/views.py @@ -1,4 +1,4 @@ -from agavepy.agave import Agave, AgaveException, load_resource +# from agavepy.agave import Agave, AgaveException, load_resource from designsafe.apps.licenses.models import LICENSE_TYPES, get_license_info from designsafe.apps.notifications.views import get_number_unread_notifications from designsafe.libs.common.decorators import profile as profile_fn @@ -17,7 +17,7 @@ logger = logging.getLogger(__name__) metrics = 
logging.getLogger('metrics') -AGAVE_RESOURCES = load_resource(getattr(settings, 'AGAVE_TENANT_BASEURL')) +# AGAVE_RESOURCES = load_resource(getattr(settings, 'AGAVE_TENANT_BASEURL')) @login_required diff --git a/designsafe/apps/auth/README.md b/designsafe/apps/auth/README.md index ed0e415cbb..de247e4a59 100644 --- a/designsafe/apps/auth/README.md +++ b/designsafe/apps/auth/README.md @@ -9,27 +9,27 @@ support the various authentication requirements of DesignSafe. Authenticate directly against TACC's TAS Identity Store. This backend is used when authenticating directly to the Django Admin app. An OAuth token will not be obtained when -using this backend, so using Agave/DesignSafe API features will not work. +using this backend, so using Tapis/DesignSafe API features will not work. -### AgaveOAuthBackend +### TapisOAuthBackend -Authenticate using Agave OAuth Webflow (authorization code). See the [Agave Authentication Docs][1] +Authenticate using Tapis OAuth Webflow (authorization code). See the [Tapis Authentication Docs][1] for complete documentation. -#### AgaveTokenRefreshMiddleware +#### TapisTokenRefreshMiddleware -OAuth tokens obtained from Agave are valid for a limited time, usually one hour (3600s). +OAuth tokens obtained from Tapis are valid for a limited time, usually ten days (14400s). The app can automatically refresh the OAuth token as necessary. Add the refresh middleware in `settings.py`. 
The middleware *must* appear after `django.contrib.sessions.middleware.SessionMiddleware`: ``` -MIDDLEWARE_CLASSES = ( +MIDDLEWARE = ( ..., 'django.contrib.sessions.middleware.SessionMiddleware', - designsafe.apps.auth.middleware.AgaveTokenRefreshMiddleware, + designsafe.apps.auth.middleware.TapisTokenRefreshMiddleware, ..., ) ``` -[1]: http://agaveapi.co/documentation/authorization-guide/#authorization_code_flow \ No newline at end of file +[1]: https://tapis.readthedocs.io/en/latest/technical/authentication.html#authorization-code-grant-generating-tokens-for-users diff --git a/designsafe/apps/auth/backends.py b/designsafe/apps/auth/backends.py index f822bf649a..b668a94b3b 100644 --- a/designsafe/apps/auth/backends.py +++ b/designsafe/apps/auth/backends.py @@ -1,51 +1,57 @@ +"""Auth backends""" + +import logging +import re from django.conf import settings from django.contrib.auth import get_user_model +from django.contrib.auth.backends import ModelBackend +from tapipy.tapis import Tapis +from tapipy.errors import BaseTapyException +from designsafe.apps.accounts.models import DesignSafeProfile, NotificationPreferences +from designsafe.apps.api.users.utils import get_user_data +from designsafe.apps.auth.models import TapisOAuthToken from django.contrib.auth.signals import user_logged_out from django.contrib import messages -from django.contrib.auth.backends import ModelBackend from django.core.exceptions import ValidationError from django.dispatch import receiver -from designsafe.apps.accounts.models import DesignSafeProfile, NotificationPreferences -from designsafe.apps.api.agave import get_service_account_client from designsafe.apps.auth.tasks import update_institution_from_tas from pytas.http import TASClient -import logging -import re -import requests -from requests.auth import HTTPBasicAuth + +logger = logging.getLogger(__name__) @receiver(user_logged_out) def on_user_logged_out(sender, request, user, **kwargs): - backend = 
request.session.get('_auth_user_backend', None) - tas_backend_name = '%s.%s' % (TASBackend.__module__, - TASBackend.__name__) - agave_backend_name = '%s.%s' % (AgaveOAuthBackend.__module__, - AgaveOAuthBackend.__name__) + "Signal processor for user_logged_out" + backend = request.session.get("_auth_user_backend", None) + tas_backend_name = "%s.%s" % (TASBackend.__module__, TASBackend.__name__) + tapis_backend_name = "%s.%s" % ( + TapisOAuthBackend.__module__, + TapisOAuthBackend.__name__, + ) if backend == tas_backend_name: - login_provider = 'TACC' - elif backend == agave_backend_name: - login_provider = 'TACC' - else: - login_provider = 'your authentication provider' - - logger = logging.getLogger(__name__) - logger.debug("attempting call to revoke agave token function: %s", user.agave_oauth.token) - a = AgaveOAuthBackend() - AgaveOAuthBackend.revoke(a,user.agave_oauth) - - logout_message = '

    You are Logged Out!

    ' \ - 'You are now logged out of DesignSafe! However, you may still ' \ - 'be logged in at %s. To ensure security, you should close your ' \ - 'browser to end all authenticated sessions.' % login_provider + login_provider = "TACC" + elif backend == tapis_backend_name: + login_provider = "TACC" + + logger.info( + "Revoking tapis token: %s", TapisOAuthToken().get_masked_token(user.tapis_oauth.access_token) + ) + backend = TapisOAuthBackend() + TapisOAuthBackend.revoke(backend, user.tapis_oauth.access_token) + + logout_message = ( + "

    You are Logged Out!

    " + "You are now logged out of DesignSafe! However, you may still " + f"be logged in at {login_provider}. To ensure security, you should close your " + "browser to end all authenticated sessions." + ) messages.warning(request, logout_message) class TASBackend(ModelBackend): - logger = logging.getLogger(__name__) - def __init__(self): self.tas = TASClient() @@ -56,20 +62,31 @@ def authenticate(self, request, username=None, password=None, **kwargs): if username is not None and password is not None: tas_user = None if request is not None: - self.logger.info('Attempting login via TAS for user "%s" from IP "%s"' % (username, request.META.get('REMOTE_ADDR'))) + self.logger.info( + 'Attempting login via TAS for user "%s" from IP "%s"' + % (username, request.META.get("REMOTE_ADDR")) + ) else: - self.logger.info('Attempting login via TAS for user "%s" from IP "%s"' % (username, 'unknown')) + self.logger.info( + 'Attempting login via TAS for user "%s" from IP "%s"' + % (username, "unknown") + ) try: # Check if this user is valid on the mail server if self.tas.authenticate(username, password): tas_user = self.tas.get_user(username=username) self.logger.info('Login successful for user "%s"' % username) else: - raise ValidationError('Authentication Error', 'Your username or password is incorrect.') + raise ValidationError( + "Authentication Error", + "Your username or password is incorrect.", + ) except Exception as e: self.logger.warning(e.args) - if re.search(r'PendingEmailConfirmation', e.args[1]): - raise ValidationError('Please confirm your email address before logging in.') + if re.search(r"PendingEmailConfirmation", e.args[1]): + raise ValidationError( + "Please confirm your email address before logging in." 
+ ) else: raise ValidationError(e.args[1]) @@ -78,27 +95,30 @@ def authenticate(self, request, username=None, password=None, **kwargs): try: # Check if the user exists in Django's local database user = UserModel.objects.get(username=username) - user.first_name = tas_user['firstName'] - user.last_name = tas_user['lastName'] - user.email = tas_user['email'] + user.first_name = tas_user["firstName"] + user.last_name = tas_user["lastName"] + user.email = tas_user["email"] user.save() except UserModel.DoesNotExist: # Create a user in Django's local database - self.logger.info('Creating local user record for "%s" from TAS Profile' % username) + self.logger.info( + 'Creating local user record for "%s" from TAS Profile' + % username + ) user = UserModel.objects.create_user( username=username, - first_name=tas_user['firstName'], - last_name=tas_user['lastName'], - email=tas_user['email'] - ) + first_name=tas_user["firstName"], + last_name=tas_user["lastName"], + email=tas_user["email"], + ) try: profile = DesignSafeProfile.objects.get(user=user) - profile.institution = tas_user.get('institution', None) + profile.institution = tas_user.get("institution", None) profile.save() except DesignSafeProfile.DoesNotExist: profile = DesignSafeProfile(user=user) - profile.institution = tas_user.get('institution', None) + profile.institution = tas_user.get("institution", None) profile.save() try: @@ -110,72 +130,67 @@ def authenticate(self, request, username=None, password=None, **kwargs): return user -# class CILogonBackend(ModelBackend): - -# def authenticate(self, **kwargs): -# return None - - -class AgaveOAuthBackend(ModelBackend): - - logger = logging.getLogger(__name__) +class TapisOAuthBackend(ModelBackend): def authenticate(self, *args, **kwargs): user = None - if 'backend' in kwargs and kwargs['backend'] == 'agave': - token = kwargs['token'] - base_url = getattr(settings, 'AGAVE_TENANT_BASEURL') + if "backend" in kwargs and kwargs["backend"] == "tapis": + token = 
kwargs["token"] - self.logger.info('Attempting login via Agave with token "%s"' % - token[:8].ljust(len(token), '-')) + logger.info( + 'Attempting login via Tapis with token "%s"' % TapisOAuthToken().get_masked_token(token) + ) + client = Tapis(base_url=settings.TAPIS_TENANT_BASEURL, access_token=token) - # TODO make this into an AgavePy call - response = requests.get('%s/profiles/v2/me' % base_url, - headers={'Authorization': 'Bearer %s' % token}) - if response.status_code >= 200 and response.status_code <= 299: - json_result = response.json() - agave_user = json_result['result'] - username = agave_user['username'] - UserModel = get_user_model() - try: - user = UserModel.objects.get(username=username) - user.first_name = agave_user['first_name'] - user.last_name = agave_user['last_name'] - user.email = agave_user['email'] - user.save() - except UserModel.DoesNotExist: - self.logger.info('Creating local user record for "%s" ' - 'from Agave Profile' % username) - user = UserModel.objects.create_user( - username=username, - first_name=agave_user['first_name'], - last_name=agave_user['last_name'], - email=agave_user['email'] - ) + try: + tapis_user_info = client.authenticator.get_userinfo() + except BaseTapyException as e: + logger.info("Tapis Authentication failed: %s", e.message) + return None - try: - profile = DesignSafeProfile.objects.get(user=user) - except DesignSafeProfile.DoesNotExist: - profile = DesignSafeProfile(user=user) - profile.save() - update_institution_from_tas.apply_async(args=[username], queue='api') + username = tapis_user_info.username - try: - prefs = NotificationPreferences.objects.get(user=user) - except NotificationPreferences.DoesNotExist: - prefs = NotificationPreferences(user=user) - prefs.save() + try: + user_data = get_user_data(username=username) + defaults = { + "first_name": user_data["firstName"], + "last_name": user_data["lastName"], + "email": user_data["email"], + } + except Exception: + logger.exception( + "Error retrieving TAS 
user profile data for user: %s", username + ) + defaults = { + "first_name": tapis_user_info.given_name, + "last_name": tapis_user_info.last_name, + "email": tapis_user_info.email, + } + + user, created = get_user_model().objects.update_or_create( + username=username, defaults=defaults + ) + + if created: + logger.info( + 'Created local user record for "%s" from TAS Profile', username + ) + + DesignSafeProfile.objects.get_or_create(user=user) + NotificationPreferences.objects.get_or_create(user=user) + + update_institution_from_tas.apply_async(args=[username], queue="api") + + logger.info('Login successful for user "%s"', username) - self.logger.error('Login successful for user "%s"' % username) - else: - self.logger.error('Agave Authentication failed: %s' % response.text) return user - def revoke(self, user): - base_url = getattr(settings, 'AGAVE_TENANT_BASEURL') - self.logger.info("attempting to revoke agave token %s" % user.masked_token) - response = requests.post('{base_url}/revoke'.format(base_url = base_url), - auth=HTTPBasicAuth(settings.AGAVE_CLIENT_KEY, settings.AGAVE_CLIENT_SECRET), - data={'token': user.access_token}) - self.logger.info("revoke response is %s" % response) + def revoke(self, token): + logger.info( + "Attempting to revoke Tapis token %s" % TapisOAuthToken().get_masked_token(token) + ) + + client = Tapis(base_url=settings.TAPIS_TENANT_BASEURL, access_token=token) + response = client.authenticator.revoke_token(token=token) + logger.info("revoke response is %s" % response) diff --git a/designsafe/apps/auth/backends_unit_test.py b/designsafe/apps/auth/backends_unit_test.py new file mode 100644 index 0000000000..f02d3e4680 --- /dev/null +++ b/designsafe/apps/auth/backends_unit_test.py @@ -0,0 +1,97 @@ +import pytest +from django.contrib.auth import get_user_model +from mock import Mock +from designsafe.apps.auth.backends import TapisOAuthBackend +from tapipy.tapis import TapisResult +from tapipy.errors import BaseTapyException + +pytestmark = 
pytest.mark.django_db + + +@pytest.fixture +def user_data_mock(mocker): + mock_user_data = mocker.patch( + "designsafe.apps.auth.backends.get_user_data", + return_value={ + "username": "testuser", + "firstName": "test", + "lastName": "user", + "email": "new@email.com", + }, + ) + return mock_user_data + + +@pytest.fixture() +def tapis_mock(mocker): + tapis_patcher = mocker.patch("designsafe.apps.auth.backends.Tapis") + mock_tapis = Mock() + mock_tapis.authenticator.get_userinfo.return_value = TapisResult( + username="testuser" + ) + tapis_patcher.return_value = mock_tapis + yield tapis_patcher + + +@pytest.fixture() +def update_institution_from_tas_mock(mocker): + yield mocker.patch("designsafe.apps.auth.backends.update_institution_from_tas") + + +# def test_launch_setup_checks(mocker, regular_user, settings): +# mocker.patch("designsafe.apps.auth.views.new_user_setup_check") +# mock_execute = mocker.patch("designsafe.apps.auth.views.execute_setup_steps") +# regular_user.profile.setup_complete = False +# launch_setup_checks(regular_user) +# mock_execute.apply_async.assert_called_with(args=["username"]) + + +def test_bad_backend_params(tapis_mock): + # Test backend authenticate with no backend params + backend = TapisOAuthBackend() + result = backend.authenticate() + assert result is None + + # Test TapisOAuthBackend if params do not indicate tapis + result = backend.authenticate(backend="not_tapis") + assert result is None + + +def test_bad_response_status( + tapis_mock, user_data_mock, update_institution_from_tas_mock +): + """Test that backend failure responses are handled""" + backend = TapisOAuthBackend() + mock_tapis = Mock() + mock_tapis.authenticator.get_userinfo.side_effect = BaseTapyException + tapis_mock.return_value = mock_tapis + result = backend.authenticate(backend="tapis", token="1234") + assert result is None + + +def test_new_user(tapis_mock, user_data_mock, update_institution_from_tas_mock): + """Test that a new user is created and returned""" + 
backend = TapisOAuthBackend() + result = backend.authenticate(backend="tapis", token="1234") + assert result.username == "testuser" + + +def test_update_existing_user( + tapis_mock, user_data_mock, update_institution_from_tas_mock +): + """Test that an existing user's information is updated with from info from the Tapis backend response""" + backend = TapisOAuthBackend() + + # Create a pre-existing user with the same username + user = get_user_model().objects.create_user( + username="testuser", + first_name="test", + last_name="user", + email="old@email.com", + ) + result = backend.authenticate(backend="tapis", token="1234") + # Result user object should be the same + assert result == user + # Existing user object should be updated + user = get_user_model().objects.get(username="testuser") + assert user.email == "new@email.com" diff --git a/designsafe/apps/auth/context_processors.py b/designsafe/apps/auth/context_processors.py deleted file mode 100644 index b4ec8e65a0..0000000000 --- a/designsafe/apps/auth/context_processors.py +++ /dev/null @@ -1,14 +0,0 @@ -from designsafe.apps.auth.models import AgaveOAuthToken - - -def auth(request): - try: - ag_token = request.user.agave_oauth - context = { - 'agave_ready': ag_token is not None - } - except (AttributeError, AgaveOAuthToken.DoesNotExist): - context = { - 'agave_ready': False - } - return context diff --git a/designsafe/apps/auth/middleware.py b/designsafe/apps/auth/middleware.py index acfc8174a5..10a9e81336 100644 --- a/designsafe/apps/auth/middleware.py +++ b/designsafe/apps/auth/middleware.py @@ -1,36 +1,75 @@ +""" +Auth middleware +""" + +import logging from django.contrib.auth import logout from django.core.exceptions import ObjectDoesNotExist -from requests.exceptions import RequestException, HTTPError -import logging -from django.utils.deprecation import MiddlewareMixin +from django.db import transaction +from django.http import HttpResponseRedirect +from django.urls import reverse +from tapipy.errors 
import BaseTapyException +from designsafe.apps.auth.models import TapisOAuthToken logger = logging.getLogger(__name__) -class AgaveTokenRefreshMiddleware(MiddlewareMixin): +class TapisTokenRefreshMiddleware: + """Refresh Middleware for a User's Tapis OAuth Token""" - def process_request(self, request): - if request.path != '/logout/' and request.user.is_authenticated: - try: - agave_oauth = request.user.agave_oauth - if agave_oauth.expired: - try: - agave_oauth.client.token.refresh() - except HTTPError: - logger.exception('Agave Token refresh failed; Forcing logout', - extra={'user': request.user.username}) - logout(request) - except ObjectDoesNotExist: - logger.warn('Authenticated user missing Agave API Token', - extra={'user': request.user.username}) - logout(request) - except RequestException: - logger.exception('Agave Token refresh failed. Forcing logout', - extra={'user': request.user.username}) - logout(request) - - def process_response(self, request, response): - if hasattr(request, 'user'): - if request.user.is_authenticated: - response['Authorization'] = 'Bearer ' + request.user.agave_oauth.access_token + def __init__(self, get_response): + self.get_response = get_response + + def __call__(self, request): + if ( + request.path != reverse("logout") + and request.path != reverse("login") + and not request.path.startswith("/static/") + and request.user.is_authenticated + ): + self.process_request(request) + + response = self.get_response(request) return response + + def process_request(self, request): + """Processes requests to backend and refreshes Tapis Token atomically if token is expired.""" + try: + tapis_oauth = request.user.tapis_oauth + except ObjectDoesNotExist: + logger.warning( + "Authenticated user missing Tapis OAuth Token", + extra={"user": request.user.username}, + ) + logout(request) + return HttpResponseRedirect(reverse("designsafe_auth:login")) + + if not tapis_oauth.expired: + return + + logger.info( + f"Tapis OAuth token expired for user 
{request.user.username}. Refreshing token" + ) + with transaction.atomic(): + # Get a lock on this user's token row in db. + latest_token = ( + TapisOAuthToken.objects.select_for_update() + .filter(user=request.user) + .first() + ) + if latest_token.expired: + try: + logger.info("Refreshing Tapis OAuth token") + tapis_oauth.refresh_tokens() + except BaseTapyException: + logger.exception( + "Tapis Token refresh failed. Forcing logout", + extra={"user": request.user.username}, + ) + logout(request) + return HttpResponseRedirect(reverse("designsafe_auth:login")) + + else: + logger.info( + "Token updated by another request. Refreshing token from DB." + ) diff --git a/designsafe/apps/auth/migrations/0003_tapisoauthtoken_delete_agaveoauthtoken.py b/designsafe/apps/auth/migrations/0003_tapisoauthtoken_delete_agaveoauthtoken.py new file mode 100644 index 0000000000..29a3f33c85 --- /dev/null +++ b/designsafe/apps/auth/migrations/0003_tapisoauthtoken_delete_agaveoauthtoken.py @@ -0,0 +1,44 @@ +# Generated by Django 4.2.6 on 2024-02-28 21:26 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("designsafe_auth", "0002_auto_20160209_0427"), + ] + + operations = [ + migrations.CreateModel( + name="TapisOAuthToken", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("access_token", models.CharField(max_length=2048)), + ("refresh_token", models.CharField(max_length=2048)), + ("expires_in", models.BigIntegerField()), + ("created", models.BigIntegerField()), + ( + "user", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="tapis_oauth", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + ), + migrations.DeleteModel( + name="AgaveOAuthToken", + ), + ] diff --git 
a/designsafe/apps/auth/models.py b/designsafe/apps/auth/models.py index 9f57bc9fb8..6db226f3f0 100644 --- a/designsafe/apps/auth/models.py +++ b/designsafe/apps/auth/models.py @@ -1,126 +1,115 @@ -from django.db import models -from django.conf import settings -from agavepy.agave import Agave -from agavepy import agave +"""Auth models +""" + import logging -import six import time -import requests -from requests import HTTPError -# from .signals import * -from designsafe.libs.common.decorators import deprecated +from django.db import models +from django.conf import settings +from tapipy.tapis import Tapis logger = logging.getLogger(__name__) TOKEN_EXPIRY_THRESHOLD = 600 -AGAVE_RESOURCES = agave.load_resource(getattr(settings, 'AGAVE_TENANT_BASEURL')) -class AgaveOAuthToken(models.Model): - user = models.OneToOneField(settings.AUTH_USER_MODEL, related_name='agave_oauth', on_delete=models.CASCADE) - token_type = models.CharField(max_length=255) - scope = models.CharField(max_length=255) - access_token = models.CharField(max_length=255) - refresh_token = models.CharField(max_length=255) +class TapisOAuthToken(models.Model): + """Represents an Tapis OAuth Token object. + + Use this class to store login details as well as refresh a token. + """ + + user = models.OneToOneField( + settings.AUTH_USER_MODEL, related_name="tapis_oauth", on_delete=models.CASCADE + ) + access_token = models.CharField(max_length=2048) + refresh_token = models.CharField(max_length=2048) expires_in = models.BigIntegerField() created = models.BigIntegerField() - @property - def masked_token(self): - return self.access_token[:8].ljust(len(self.access_token), '-') - @property def expired(self): - current_time = time.time() - return self.created + self.expires_in - current_time - TOKEN_EXPIRY_THRESHOLD <= 0 + """Check if token is expired + + :return: True or False, depending if the token is expired. 
+ :rtype: bool + """ + return self.is_token_expired(self.created, self.expires_in) @property def created_at(self): - """ - Map the agavepy.Token property to model property + """Map the tapipy.Token property to model property + :return: The Epoch timestamp this token was created + :rtype: int """ return self.created_at @created_at.setter def created_at(self, value): - """ - Map the agavepy.Token property to model property - :param value: The Epoch timestamp this token was created + """Map the tapipy.Token property to model property + + :param int value: The Epoch timestamp this token was created """ self.created = value @property def token(self): + """Token dictionary. + + :return: Full token object + :rtype: dict + """ return { - 'access_token': self.access_token, - 'refresh_token': self.refresh_token, - 'token_type': self.token_type, - 'scope': self.scope, - 'created': self.created, - 'expires_in': self.expires_in + "access_token": self.access_token, + "refresh_token": self.refresh_token, + "created": self.created, + "expires_in": self.expires_in, } @property def client(self): - return Agave(api_server=getattr(settings, 'AGAVE_TENANT_BASEURL'), - api_key=getattr(settings, 'AGAVE_CLIENT_KEY'), - api_secret=getattr(settings, 'AGAVE_CLIENT_SECRET'), - token=self.access_token, - resources=AGAVE_RESOURCES, - refresh_token=self.refresh_token, - token_callback=self.update) + """Tapis client to limit one request to Tapis per User. + + :return: Tapis client using refresh token. 
+ :rtype: :class:Tapis + """ + return Tapis( + base_url=getattr(settings, "TAPIS_TENANT_BASEURL"), + client_id=getattr(settings, "TAPIS_CLIENT_ID"), + client_key=getattr(settings, "TAPIS_CLIENT_KEY"), + access_token=self.access_token, + refresh_token=self.refresh_token, + ) def update(self, **kwargs): - for k, v in six.iteritems(kwargs): + """Bulk update model attributes""" + for k, v in kwargs.items(): setattr(self, k, v) self.save() - @deprecated - def refresh(self): - """ - DEPRECATED - :return: - """ - logger.debug('Refreshing Agave OAuth token for user=%s' % self.user.username) - ag = Agave(api_server=getattr(settings, 'AGAVE_TENANT_BASEURL'), - api_key=getattr(settings, 'AGAVE_CLIENT_KEY'), - api_secret=getattr(settings, 'AGAVE_CLIENT_SECRET'), - resources=AGAVE_RESOURCES, - token=self.access_token, - refresh_token=self.refresh_token) + def refresh_tokens(self): + """Refresh and update Tapis OAuth Tokens""" + self.client.refresh_tokens() + self.update( + created=int(time.time()), + access_token=self.client.access_token.access_token, + refresh_token=self.client.refresh_token.refresh_token, + expires_in=self.client.access_token.expires_in().total_seconds(), + ) + + def __str__(self): + access_token_masked = self.access_token[-5:] + refresh_token_masked = self.refresh_token[-5:] + return f"access_token:{access_token_masked} refresh_token:{refresh_token_masked} expires_in:{self.expires_in} created:{self.created}" + + @staticmethod + def is_token_expired(created, expires_in): + """Check if token is expired, with TOKEN_EXPIRY_THRESHOLD buffer.""" current_time = time.time() - ag.token.refresh() - self.created = int(current_time) - self.update(**ag.token.token_info) - logger.debug('Agave OAuth token for user=%s refreshed: %s' % (self.user.username, - self.masked_token)) - - -class AgaveServiceStatus(object): - page_id = getattr(settings, 'AGAVE_STATUSIO_PAGE_ID', '53a1e022814a437c5a000781') - status_io_base_url = getattr(settings, 'STATUSIO_BASE_URL', - 
'https://api.status.io/1.0') - status_overall = {} - status = [] - incidents = [] - maintenance = { - 'active': [], - 'upcoming': [], - } - - def __init__(self): - self.update() - - def update(self): - try: - resp = requests.get('%s/status/%s' % (self.status_io_base_url, self.page_id)) - data = resp.json() - if 'result' in data: - for k, v, in six.iteritems(data['result']): - setattr(self, k, v) - else: - raise Exception(data) - except HTTPError: - logger.warning('Agave Service Status update failed') + return created + expires_in - current_time - TOKEN_EXPIRY_THRESHOLD <= 0 + + @staticmethod + def get_masked_token(token): + """Return a token as a masked string""" + return token[:8].ljust(len(token), "-") diff --git a/designsafe/apps/auth/models_unit_test.py b/designsafe/apps/auth/models_unit_test.py new file mode 100644 index 0000000000..cbd9d159c2 --- /dev/null +++ b/designsafe/apps/auth/models_unit_test.py @@ -0,0 +1,46 @@ +import pytest +import time +from datetime import timedelta +from designsafe.apps.auth.models import TapisOAuthToken + +pytestmark = pytest.mark.django_db + + +@pytest.fixture +def authenticated_user_with_expired_token(authenticated_user): + authenticated_user.tapis_oauth.expires_in = 0 + authenticated_user.tapis_oauth.save() + yield authenticated_user + + +@pytest.fixture +def authenticated_user_with_valid_token(authenticated_user): + authenticated_user.tapis_oauth.created = time.time() + authenticated_user.tapis_oauth.save() + yield authenticated_user + + +@pytest.fixture() +def tapis_client_mock(mocker): + mock_client = mocker.patch("designsafe.apps.auth.models.TapisOAuthToken.client") + mock_client.access_token.access_token = ("XYZXYZXYZ",) + mock_client.access_token.expires_in.return_value = timedelta(seconds=2000) + yield mock_client + + +def test_valid_user(client, authenticated_user_with_valid_token, tapis_client_mock): + tapis_oauth = ( + TapisOAuthToken.objects.filter(user=authenticated_user_with_valid_token) + .select_for_update() + 
.get() + ) + assert not tapis_oauth.expired + + +def test_expired_user(client, authenticated_user_with_expired_token, tapis_client_mock): + tapis_oauth = ( + TapisOAuthToken.objects.filter(user=authenticated_user_with_expired_token) + .select_for_update() + .get() + ) + assert tapis_oauth.expired diff --git a/designsafe/apps/auth/tasks.py b/designsafe/apps/auth/tasks.py index 99f07e4738..bc95a2bd63 100644 --- a/designsafe/apps/auth/tasks.py +++ b/designsafe/apps/auth/tasks.py @@ -1,14 +1,18 @@ from datetime import datetime, timedelta import requests from django.conf import settings +from django.contrib.auth import get_user_model +from django.core.exceptions import ObjectDoesNotExist from django.core.mail import send_mail -from agavepy.agave import Agave, AgaveException +from designsafe.apps.api.agave import get_service_account_client, get_tg458981_client from designsafe.apps.api.tasks import agave_indexer from designsafe.apps.api.notifications.models import Notification from celery import shared_task from django.contrib.auth import get_user_model from pytas.http import TASClient - +from tapipy.errors import NotFoundError, BaseTapyException +from designsafe.utils.system_access import register_public_key, create_system_credentials +from designsafe.utils.encryption import createKeyPair from requests import HTTPError from django.contrib.auth import get_user_model import logging @@ -17,76 +21,77 @@ logger = logging.getLogger(__name__) -@shared_task(default_retry_delay=30, max_retries=3) -def check_or_create_agave_home_dir(username, systemId): +def get_systems_to_configure(username): + """ Get systems to configure either during startup or for new user """ + + systems = [] + for system in settings.TAPIS_SYSTEMS_TO_CONFIGURE: + system_copy = system.copy() + system_copy['path'] = system_copy['path'].format(username=username) + systems.append(system_copy) + return systems + + +@shared_task(default_retry_delay=30, max_retries=3, queue='onboarding') +def 
check_or_configure_system_and_user_directory(username, system_id, path, create_path): try: - # TODO should use OS calls to create directory. - logger.info( - "Checking home directory for user=%s on " - "default storage systemId=%s", - username, - systemId + user_client = get_user_model().objects.get(username=username).tapis_oauth.client + user_client.files.listFiles( + systemId=system_id, path=path ) - ag = Agave(api_server=settings.AGAVE_TENANT_BASEURL, - token=settings.AGAVE_SUPER_TOKEN) - try: - listing_response = ag.files.list( - systemId=systemId, - filePath=username) - logger.info('check home dir response: {}'.format(listing_response)) - - except HTTPError as e: - if e.response.status_code == 404: - logger.info("Creating the home directory for user=%s then going to run setfacl", username) - body = {'action': 'mkdir', 'path': username} - fm_response = ag.files.manage(systemId=systemId, - filePath='', - body=body) - logger.info('mkdir response: {}'.format(fm_response)) - - ds_admin_client = Agave( - api_server=getattr( - settings, - 'AGAVE_TENANT_BASEURL' - ), - token=getattr( - settings, - 'AGAVE_SUPER_TOKEN' - ), + logger.info(f"System Works: " + f"Checked and there is no need to configure system:{system_id} path:{path} for {username}") + return + except ObjectDoesNotExist: + # User is missing; handling email confirmation process where user has not logged in + logger.info(f"New User: " + f"Checked and there is a need to configure system:{system_id} path:{path} for {username} ") + except BaseTapyException as e: + logger.info(f"Unable to list system/files: " + f"Checked and there is a need to configure system:{system_id} path:{path} for {username}: {e}") + + try: + if create_path: + tg458981_client = get_tg458981_client() + try: + # User tg account to check if path exists + tg458981_client.files.listFiles(systemId=system_id, path=path) + logger.info(f"Directory for user={username} on system={system_id}/{path} exists and works. 
") + except NotFoundError: + logger.info("Creating the directory for user=%s then going to run setfacl on system=%s path=%s", + username, + system_id, + path) + + tg458981_client.files.mkdir(systemId=system_id, path=path) + tg458981_client.files.setFacl( + systemId=system_id, + path=path, + operation="ADD", + recursionMethod="PHYSICAL", + aclString=f"d:u:{username}:rwX,u:{username}:rwX,d:u:tg458981:rwX,u:tg458981:rwX,d:o::---,o::---,d:m::rwX,m::rwX", + ) + agave_indexer.apply_async( + kwargs={"systemId": system_id, "filePath": path, "recurse": False}, + queue="indexing", ) - if systemId == settings.AGAVE_STORAGE_SYSTEM: - job_body = { - 'parameters': { - 'username': username, - 'directory': 'shared/{}'.format(username) - }, - 'name': f'setfacl mydata for user {username}', - 'appId': 'setfacl_corral3-0.1' - } - elif systemId == settings.AGAVE_WORKING_SYSTEM: - job_body = { - 'parameters': { - 'username': username, - }, - 'name': f'setfacl work for user {username}', - 'appId': 'setfacl_frontera_work-0.1' - } - else: - logger.error('Attempting to set permissions on unsupported system: {}'.format(systemId)) - return - - jobs_response = ds_admin_client.jobs.submit(body=job_body) - logger.info('setfacl response: {}'.format(jobs_response)) - - # add dir to index - logger.info("Indexing the home directory for user=%s", username) - agave_indexer.apply_async(kwargs={'username': username, 'systemId': systemId, 'filePath': username}, queue='indexing') - - except AgaveException: - logger.exception('Failed to create home directory.', + # create keys, push to key service and use as credential for Tapis system + logger.info("Creating credentials for user=%s on system=%s", username, system_id) + (private_key, public_key) = createKeyPair() + register_public_key(username, public_key, system_id) + service_account = get_service_account_client() + create_system_credentials(service_account, + username, + public_key, + private_key, + system_id) + except BaseTapyException: + 
logger.exception('Failed to configure system (i.e. create directory, set acl, create credentials).', extra={'user': username, - 'systemId': systemId}) + 'systemId': system_id, + 'path': path}) + raise self.retry(exc=exc) @shared_task(default_retry_delay=30, max_retries=3) @@ -97,7 +102,7 @@ def new_user_alert(username): 'Name: ' + user.first_name + ' ' + user.last_name + '\n' + 'Id: ' + str(user.id) + '\n', settings.DEFAULT_FROM_EMAIL, settings.NEW_ACCOUNT_ALERT_EMAILS.split(','),) - + tram_headers = {"tram-services-key": settings.TRAM_SERVICES_KEY} tram_body = {"project_id": settings.TRAM_PROJECT_ID, "email": user.email} diff --git a/designsafe/apps/auth/templates/designsafe/apps/auth/login.html b/designsafe/apps/auth/templates/designsafe/apps/auth/login.html index 6b8e292501..a10dbc2408 100644 --- a/designsafe/apps/auth/templates/designsafe/apps/auth/login.html +++ b/designsafe/apps/auth/templates/designsafe/apps/auth/login.html @@ -23,13 +23,13 @@

    Log in {% endif %} -{% endif %} {% addtoblock "css" %} @@ -52,21 +51,22 @@ {% endaddtoblock %} +{% endif %} -{% addtoblock "js" %} +{% addtoblock "react_assets" %} {% if debug and react_flag %} - - - + + {% else %} - + {% include "react-assets.html" %} {% endif %} {% endaddtoblock %} -{% endblock %} \ No newline at end of file +{% endblock %} diff --git a/designsafe/apps/workspace/tests.py b/designsafe/apps/workspace/tests.py index c1261d9a1c..0e09e88518 100644 --- a/designsafe/apps/workspace/tests.py +++ b/designsafe/apps/workspace/tests.py @@ -1,40 +1,48 @@ import json -import os from mock import patch from django.test import TestCase from .models.app_descriptions import AppDescription +from unittest import skip from django.urls import reverse from django.contrib.auth import get_user_model +@skip("TODOv3: Update apps api with Tapisv3") class AppDescriptionModelTest(TestCase): - fixtures = ['user-data', 'agave-oauth-token-data'] + fixtures = ["user-data", "auth"] def setUp(self): user = get_user_model().objects.get(pk=2) - user.set_password('user/password') + user.set_password("user/password") user.save() def test_string_representation(self): - descriptionModel = AppDescription(appid='TestApp0.1', appdescription='Test description') + descriptionModel = AppDescription( + appid="TestApp0.1", appdescription="Test description" + ) self.assertEqual(str(descriptionModel), descriptionModel.appid) def test_get_app_description(self): - AppDescription.objects.create(appid='TestApp0.1', appdescription='Test description') - self.client.login(username='ds_user', password='user/password') - url = reverse('designsafe_workspace:call_api', args=('description',)) - response = self.client.get(url, {'app_id': 'TestApp0.1'}) - self.assertContains(response, 'TestApp0.1') + AppDescription.objects.create( + appid="TestApp0.1", appdescription="Test description" + ) + self.client.login(username="ds_user", password="user/password") + url = reverse("designsafe_workspace:call_api", 
args=("description",)) + response = self.client.get(url, {"app_id": "TestApp0.1"}) + self.assertContains(response, "TestApp0.1") +@skip("TODOv3: Update apps api with Tapisv3") class TestAppsApiViews(TestCase): - fixtures = ['user-data', 'agave-oauth-token-data'] + fixtures = ["user-data", "auth"] @classmethod def setUpClass(cls): super(TestAppsApiViews, cls).setUpClass() - cls.mock_client_patcher = patch('designsafe.apps.auth.models.AgaveOAuthToken.client') + cls.mock_client_patcher = patch( + "designsafe.apps.auth.models.TapisOAuthToken.client" + ) cls.mock_client = cls.mock_client_patcher.start() @classmethod @@ -44,26 +52,20 @@ def tearDownClass(cls): def setUp(self): user = get_user_model().objects.get(pk=2) - user.set_password('user/password') + user.set_password("user/password") user.save() def test_apps_list(self): - self.client.login(username='ds_user', password='user/password') + self.client.login(username="ds_user", password="user/password") apps = [ - { - "id": "app-one", - "executionSystem": "stampede2" - }, - { - "id": "app-two", - "executionSystem": "stampede2" - } + {"id": "app-one", "executionSystem": "stampede2"}, + {"id": "app-two", "executionSystem": "stampede2"}, ] - #need to do a return_value on the mock_client because - #the calling signature is something like client = Agave(**kwargs).apps.list() + # need to do a return_value on the mock_client because + # the calling signature is something like client = Agave(**kwargs).apps.list() self.mock_client.apps.list.return_value = apps - url = reverse('designsafe_workspace:call_api', args=('apps',)) + url = reverse("designsafe_workspace:call_api", args=("apps",)) response = self.client.get(url, follow=True) data = response.json() # If the request is sent successfully, then I expect a response to be returned. 
@@ -72,37 +74,41 @@ def test_apps_list(self): self.assertTrue(data == apps) def test_job_submit_notifications(self): - with open('designsafe/apps/workspace/fixtures/job-submission.json') as f: + with open("designsafe/apps/workspace/fixtures/job-submission.json") as f: job_data = json.load(f) self.mock_client.jobs.submit.return_value = {"status": "ok"} - self.client.login(username='ds_user', password='user/password') + self.client.login(username="ds_user", password="user/password") - url = reverse('designsafe_workspace:call_api', args=('jobs',)) - response = self.client.post(url, json.dumps(job_data), content_type="application/json") + url = reverse("designsafe_workspace:call_api", args=("jobs",)) + response = self.client.post( + url, json.dumps(job_data), content_type="application/json" + ) data = response.json() self.assertTrue(self.mock_client.jobs.submit.called) - self.assertEqual(data['status'], 'ok') + self.assertEqual(data["status"], "ok") self.assertEqual(response.status_code, 200) def test_job_submit_parse_urls(self): - with open('designsafe/apps/workspace/fixtures/job-submission.json') as f: + with open("designsafe/apps/workspace/fixtures/job-submission.json") as f: job_data = json.load(f) # the spaces should get quoted out job_data["inputs"]["workingDirectory"] = "agave://test.system/name with spaces" self.mock_client.jobs.submit.return_value = {"status": "ok"} - self.client.login(username='ds_user', password='user/password') + self.client.login(username="ds_user", password="user/password") - url = reverse('designsafe_workspace:call_api', args=('jobs',)) - response = self.client.post(url, json.dumps(job_data), content_type="application/json") + url = reverse("designsafe_workspace:call_api", args=("jobs",)) + response = self.client.post( + url, json.dumps(job_data), content_type="application/json" + ) self.assertEqual(response.status_code, 200) args, kwargs = self.mock_client.jobs.submit.call_args body = kwargs["body"] input = 
body["inputs"]["workingDirectory"] - #the spaces should have been quoted + # the spaces should have been quoted self.assertTrue("%20" in input) def test_licensed_apps(self): diff --git a/designsafe/apps/workspace/urls.py b/designsafe/apps/workspace/urls.py index f17951f190..362316ed6a 100644 --- a/designsafe/apps/workspace/urls.py +++ b/designsafe/apps/workspace/urls.py @@ -1,22 +1,8 @@ -from django.urls import re_path as url -from django.urls import reverse -from django.utils.translation import gettext_lazy as _ +"""Workspace URLs +""" +from django.urls import re_path from designsafe.apps.workspace import views -# TODO look at linking directly into an app in the workspace - urlpatterns = [ - url(r'^$', views.index, name='index'), - url(r'^api/(?P[a-z]+?)/$', views.call_api, name='call_api'), - url(r'^notification/process/(?P\d+)', views.process_notification, name='process_notification'), + re_path('^', views.WorkspaceView.as_view(), name="workspace"), ] - -def menu_items(**kwargs): - if 'type' in kwargs and kwargs['type'] == 'research_workbench': - return [ - { - 'label': _('Workspace'), - 'url': reverse('designsafe_workspace:index'), - 'children': [] - } - ] diff --git a/designsafe/apps/workspace/views.py b/designsafe/apps/workspace/views.py index 20325b89cf..6c2ab98af8 100644 --- a/designsafe/apps/workspace/views.py +++ b/designsafe/apps/workspace/views.py @@ -1,286 +1,19 @@ -from agavepy.agave import AgaveException, Agave -from django.shortcuts import render, redirect -from django.conf import settings +""" +.. 
:module: apps.workspace.views + :synopsis: Views to handle Workspace +""" +from django.views.generic.base import TemplateView +from django.utils.decorators import method_decorator +from django.views.decorators.csrf import ensure_csrf_cookie from django.contrib.auth.decorators import login_required -from django.core.serializers.json import DjangoJSONEncoder -from django.urls import reverse -from django.core.exceptions import ObjectDoesNotExist -from django.http import HttpResponse -from designsafe.apps.api.notifications.models import Notification -from designsafe.apps.workspace.tasks import JobSubmitError, submit_job -from designsafe.apps.licenses.models import LICENSE_TYPES, get_license_info -from designsafe.libs.common.decorators import profile as profile_fn -from designsafe.apps.api.tasks import index_or_update_project -from designsafe.apps.workspace import utils as WorkspaceUtils -from designsafe.apps.workspace.models.app_descriptions import AppDescription -from requests import HTTPError -from urllib.parse import urlparse -from datetime import datetime -import json -import six -import logging -import urllib.request, urllib.parse, urllib.error -logger = logging.getLogger(__name__) -@login_required -def index(request): - context = { - } - return render(request, 'designsafe/apps/workspace/index.html', context) +@method_decorator(login_required, name="dispatch") +class WorkspaceView(TemplateView): + """Workspace View""" + template_name = 'designsafe/apps/workspace/index.html' - -def _app_license_type(app_id): - app_lic_type = app_id.replace('-{}'.format(app_id.split('-')[-1]), '').upper() - lic_type = next((t for t in LICENSE_TYPES if t in app_lic_type), None) - return lic_type - - -@profile_fn -@login_required -def call_api(request, service): - try: - agave = request.user.agave_oauth.client - if service == 'apps': - app_id = request.GET.get('app_id') - if app_id: - data = agave.apps.get(appId=app_id) - data['exec_sys'] = 
agave.systems.get(systemId=data['executionSystem']) - lic_type = _app_license_type(app_id) - data['license'] = { - 'type': lic_type - } - if lic_type is not None: - _, license_models = get_license_info() - license_model = [x for x in license_models if x.license_type == lic_type][0] - lic = license_model.objects.filter(user=request.user).first() - data['license']['enabled'] = lic is not None - - else: - - public_only = request.GET.get('publicOnly') - if public_only == 'true': - data = agave.apps.list(publicOnly='true') - else: - data = agave.apps.list() - - elif service == 'monitors': - target = request.GET.get('target') - ds_admin_client = Agave(api_server=getattr(settings, 'AGAVE_TENANT_BASEURL'), token=getattr(settings, 'AGAVE_SUPER_TOKEN')) - data = ds_admin_client.monitors.list(target=target) - - elif service == 'meta': - app_id = request.GET.get('app_id') - if request.method == 'GET': - if app_id: - data = agave.meta.get(appId=app_id) - lic_type = _app_license_type(app_id) - data['license'] = { - 'type': lic_type - } - if lic_type is not None: - _, license_models = get_license_info() - license_model = [x for x in license_models if x.license_type == lic_type][0] - lic = license_model.objects.filter(user=request.user).first() - data['license']['enabled'] = lic is not None - - else: - query = request.GET.get('q') - data = agave.meta.listMetadata(q=query) - elif request.method == 'POST': - meta_post = json.loads(request.body) - meta_uuid = meta_post.get('uuid') - - if meta_uuid: - del meta_post['uuid'] - data = agave.meta.updateMetadata(uuid=meta_uuid, body=meta_post) - index_or_update_project.apply_async(args=[meta_uuid], queue='api') - else: - data = agave.meta.addMetadata(body=meta_post) - elif request.method == 'DELETE': - meta_uuid = request.GET.get('uuid') - if meta_uuid: - data = agave.meta.deleteMetadata(uuid=meta_uuid) - - - # TODO: Need auth on this DELETE business - elif service == 'jobs': - if request.method == 'DELETE': - job_id = 
request.GET.get('job_id') - data = agave.jobs.delete(jobId=job_id) - elif request.method == 'POST': - job_post = json.loads(request.body) - logger.debug(job_post) - job_id = job_post.get('job_id') - - # cancel job / stop job - if job_id: - data = agave.jobs.manage(jobId=job_id, body='{"action":"stop"}') - - # submit job - elif job_post: - - # cleaning archive path value - if 'archivePath' in job_post: - parsed = urlparse(job_post['archivePath']) - if parsed.path.startswith('/'): - # strip leading '/' - archive_path = parsed.path[1:] - else: - archive_path = parsed.path - - job_post['archivePath'] = archive_path - - if parsed.netloc: - job_post['archiveSystem'] = parsed.netloc - else: - job_post['archivePath'] = \ - '{}/archive/jobs/{}/${{JOB_NAME}}-${{JOB_ID}}'.format( - request.user.username, - datetime.now().strftime('%Y-%m-%d')) - - # check for running licensed apps - lic_type = _app_license_type(job_post['appId']) - if lic_type is not None: - _, license_models = get_license_info() - license_model = [x for x in license_models if x.license_type == lic_type][0] - lic = license_model.objects.filter(user=request.user).first() - job_post['parameters']['_license'] = lic.license_as_str() - - # url encode inputs - if job_post['inputs']: - for key, value in six.iteritems(job_post['inputs']): - if type(value) == list: - inputs = [] - for val in value: - parsed = urlparse(val) - if parsed.scheme: - inputs.append('{}://{}{}'.format( - parsed.scheme, parsed.netloc, urllib.parse.quote(parsed.path))) - else: - inputs.append(urllib.parse.quote(parsed.path)) - job_post['inputs'][key] = inputs - else: - parsed = urlparse(value) - if parsed.scheme: - job_post['inputs'][key] = '{}://{}{}'.format( - parsed.scheme, parsed.netloc, urllib.parse.quote(parsed.path)) - else: - job_post['inputs'][key] = urllib.parse.quote(parsed.path) - - if settings.DEBUG: - wh_base_url = settings.WEBHOOK_POST_URL.strip('/') + '/webhooks/' - jobs_wh_url = settings.WEBHOOK_POST_URL + 
reverse('designsafe_api:jobs_wh_handler') - else: - wh_base_url = request.build_absolute_uri('/webhooks/') - jobs_wh_url = request.build_absolute_uri(reverse('designsafe_api:jobs_wh_handler')) - - job_post['parameters']['_webhook_base_url'] = wh_base_url - - # Remove any params from job_post that are not in appDef - for param, _ in list(job_post['parameters'].items()): - if not any(p['id'] == param for p in job_post['appDefinition']['parameters']): - del job_post['parameters'][param] - - del job_post['appDefinition'] - - job_post['notifications'] = [ - {'url': jobs_wh_url, - 'event': e} - for e in ["PENDING", "QUEUED", "SUBMITTING", "PROCESSING_INPUTS", "STAGED", "RUNNING", "KILLED", "FAILED", "STOPPED", "FINISHED", "BLOCKED"]] - - try: - data = submit_job(request, request.user.username, job_post) - except JobSubmitError as e: - data = e.json() - logger.error('Failed to submit job {0}'.format(data)) - return HttpResponse(json.dumps(data), - content_type='application/json', - status=e.status_code) - - # list jobs (via POST?) 
- else: - limit = request.GET.get('limit', 10) - offset = request.GET.get('offset', 0) - data = agave.jobs.list(limit=limit, offset=offset) - - elif request.method == 'GET': - job_id = request.GET.get('job_id') - - # get specific job info - if job_id: - data = agave.jobs.get(jobId=job_id) - q = {"associationIds": job_id} - job_meta = agave.meta.listMetadata(q=json.dumps(q)) - data['_embedded'] = {"metadata": job_meta} - - archive_system_path = '{}/{}'.format(data['archiveSystem'], - data['archivePath']) - data['archiveUrl'] = reverse( - 'designsafe_data:data_depot') - data['archiveUrl'] += 'agave/{}/'.format(archive_system_path) - - # list jobs - else: - limit = request.GET.get('limit', 10) - offset = request.GET.get('offset', 0) - data = agave.jobs.list(limit=limit, offset=offset) - else: - return HttpResponse('Unexpected service: %s' % service, status=400) - - elif service == 'ipynb': - put = json.loads(request.body) - dir_path = put.get('file_path') - system = put.get('system') - data = WorkspaceUtils.setup_identity_file( - request.user.username, - agave, - system, - dir_path - ) - elif service == 'description': - app_id = request.GET.get('app_id') - try: - data = AppDescription.objects.get(appid=app_id).desc_to_dict() - except ObjectDoesNotExist: - return HttpResponse('No description found for {}'.format(app_id), status=200) - else: - return HttpResponse('Unexpected service: %s' % service, status=400) - except HTTPError as e: - logger.exception( - 'Failed to execute %s API call due to HTTPError=%s\n%s', - service, - e, - e.response.content - ) - return HttpResponse(json.dumps(e), - content_type='application/json', - status=400) - except AgaveException as e: - logger.exception('Failed to execute {0} API call due to AgaveException={1}'.format( - service, e)) - return HttpResponse(json.dumps(e), content_type='application/json', - status=400) - except Exception as e: - logger.exception('Failed to execute {0} API call due to Exception={1}'.format( - service, e)) - 
return HttpResponse( - json.dumps({'status': 'error', 'message': '{}'.format(e)}), - content_type='application/json', status=400) - - return HttpResponse(json.dumps(data, cls=DjangoJSONEncoder), - content_type='application/json') - - -def process_notification(request, pk, **kwargs): - n = Notification.objects.get(pk=pk) - extra = n.extra_content - logger.info('extra: {}'.format(extra)) - archiveSystem = extra['archiveSystem'] - archivePath = extra['archivePath'] - - archive_id = '%s/%s' % (archiveSystem, archivePath) - - target_path = reverse('designsafe_data:data_depot') + 'agave/' + archive_id + '/' - - return redirect(target_path) + @method_decorator(ensure_csrf_cookie) + def dispatch(self, request, *args, **kwargs): + """Overwrite dispatch to ensure csrf cookie""" + return super(WorkspaceView, self).dispatch(request, *args, **kwargs) diff --git a/designsafe/asgi.py b/designsafe/asgi.py index 4916d71d1b..e79c37d877 100644 --- a/designsafe/asgi.py +++ b/designsafe/asgi.py @@ -16,7 +16,6 @@ from django.urls import re_path from designsafe.apps.signals.websocket_consumers import DesignsafeWebsocketConsumer -#from chat.routing import websocket_urlpatterns os.environ.setdefault("DJANGO_SETTINGS_MODULE", "designsafe.settings") websocket_urlpatterns = [ diff --git a/designsafe/conftest.py b/designsafe/conftest.py index 3b327245b1..d32b49dc54 100644 --- a/designsafe/conftest.py +++ b/designsafe/conftest.py @@ -1,44 +1,63 @@ +"""Base User pytest fixtures""" + import pytest +import os +import json +from unittest.mock import patch from django.conf import settings -from designsafe.apps.auth.models import AgaveOAuthToken +from designsafe.apps.auth.models import TapisOAuthToken + @pytest.fixture -def mock_agave_client(mocker): - yield mocker.patch('designsafe.apps.auth.models.AgaveOAuthToken.client', autospec=True) +def mock_tapis_client(mocker): + """Tapis client fixture""" + yield mocker.patch( + "designsafe.apps.auth.models.TapisOAuthToken.client", autospec=True + ) 
@pytest.fixture -def regular_user(django_user_model, mock_agave_client): - django_user_model.objects.create_user(username="username", - password="password", - first_name="Firstname", - last_name="Lastname", - email="user@user.com") - django_user_model.objects.create_user(username="username2", - password="password2", - first_name="Firstname2", - last_name="Lastname2", - email="user@user.com2") +def regular_user(django_user_model, mock_tapis_client): + """Normal User fixture""" + django_user_model.objects.create_user( + username="username", + password="password", + first_name="Firstname", + last_name="Lastname", + email="user@user.com", + ) user = django_user_model.objects.get(username="username") - token = AgaveOAuthToken.objects.create( + TapisOAuthToken.objects.create( user=user, - token_type="bearer", - scope="default", access_token="1234fsf", refresh_token="123123123", expires_in=14400, - created=1523633447) - token.save() + created=1523633447, + ) yield user +@pytest.fixture +def regular_user_using_jwt(regular_user, client): + """Fixture for regular user who is using jwt for authenticated requests""" + with patch('designsafe.apps.api.decorators.Tapis') as mock_tapis: + # Mock the Tapis's validate_token method within the tapis_jwt_login decorator + mock_validate_token = mock_tapis.return_value.validate_token + mock_validate_token.return_value = {"tapis/username": regular_user.username} + + client.defaults['HTTP_X_TAPIS_TOKEN'] = 'fake_token_string' + + yield client + + @pytest.fixture def project_admin_user(django_user_model): - django_user_model.objects.create_user(username="test_prjadmin", - password="password", - first_name="Project", - last_name="Admin", + django_user_model.objects.create_user( + username="test_prjadmin", + password="password", + first_name="Project", + last_name="Admin", ) user = django_user_model.objects.get(username="test_prjadmin") yield user @@ -48,3 +67,16 @@ def project_admin_user(django_user_model): def authenticated_user(client, 
regular_user): client.force_login(regular_user) yield regular_user + + +@pytest.fixture +def tapis_tokens_create_mock(): + yield json.load( + open( + os.path.join( + settings.BASE_DIR, + "designsafe/fixtures/tapis/auth/create-tokens-response.json", + ), + "r", + ) + ) diff --git a/designsafe/fixtures/auth.json b/designsafe/fixtures/auth.json new file mode 100644 index 0000000000..b338bfb9e9 --- /dev/null +++ b/designsafe/fixtures/auth.json @@ -0,0 +1,35 @@ +[ + { + "model": "designsafe_auth.tapisoauthtoken", + "pk": 1, + "fields": { + "user": 1, + "access_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJqdGkiOiJkMGU1YWZiZi05Yzk3LTQyOTMtOTNlMS1jYWIyYzAxY2JhMDAiLCJpc3MiOiJodHRwczovL2Rldi5kZXZlbG9wLnRhcGlzLmlvL3YzL3Rva2VucyIsInN1YiI6InRlc3R1c2VyMjAwQGRldiIsInRhcGlzL3RlbmFudF9pZCI6ImRldiIsInRhcGlzL3Rva2VuX3R5cGUiOiJhY2Nlc3MiLCJ0YXBpcy9kZWxlZ2F0aW9uIjpmYWxzZSwidGFwaXMvZGVsZWdhdGlvbl9zdWIiOm51bGwsInRhcGlzL3VzZXJuYW1lIjoidGVzdHVzZXIyMDAiLCJ0YXBpcy9hY2NvdW50X3R5cGUiOiJ1c2VyIiwiZXhwIjoxNjU2MDE5MzM1fQ.2mevJWnoS-nlUNfna17berL1HKCHKaPuX6BGi8RZQTQV2meFRLNhAu8B0nDJvROTqYiHna23N2h_FEgS51kRhpwL8N3zTuguh2cT090GxzCFw1QnI1V2rNK4zZjvxagciJxov8SbaOgta6H6_AUentKi_NFjpYTerPRjCDkuCwYitvGOJdzTUFY7cn8SX6JQvlRkcwQ7I0bfC5JN5m5Q0trPD5r2-VDIElI5JVY_isMMT9O5-lT1HTIN1BCYoOnLPgza6vkZeWdArsW9bcvpMANjDlK3mWFtc1fEybN6O3c9RaxRj8GO8zNoyngNH7h6DXeEGdsVJcrt9VWI-nW8iA", + "refresh_token": 
"eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJqdGkiOiIwYTYzNTAxOS1mNTllLTQxMjItOGUwNi0zZmRkYTNmMzYzNWEiLCJpc3MiOiJodHRwczovL2Rldi5kZXZlbG9wLnRhcGlzLmlvL3YzL3Rva2VucyIsInN1YiI6InRlc3R1c2VyMjAwQGRldiIsInRhcGlzL2luaXRpYWxfdHRsIjo2MDAsInRhcGlzL3RlbmFudF9pZCI6ImRldiIsInRhcGlzL3Rva2VuX3R5cGUiOiJyZWZyZXNoIiwiZXhwIjoxNjU2MDE5MzM1LCJ0YXBpcy9hY2Nlc3NfdG9rZW4iOnsianRpIjoiZDBlNWFmYmYtOWM5Ny00MjkzLTkzZTEtY2FiMmMwMWNiYTAwIiwiaXNzIjoiaHR0cHM6Ly9kZXYuZGV2ZWxvcC50YXBpcy5pby92My90b2tlbnMiLCJzdWIiOiJ0ZXN0dXNlcjIwMEBkZXYiLCJ0YXBpcy90ZW5hbnRfaWQiOiJkZXYiLCJ0YXBpcy90b2tlbl90eXBlIjoiYWNjZXNzIiwidGFwaXMvZGVsZWdhdGlvbiI6ZmFsc2UsInRhcGlzL2RlbGVnYXRpb25fc3ViIjpudWxsLCJ0YXBpcy91c2VybmFtZSI6InRlc3R1c2VyMjAwIiwidGFwaXMvYWNjb3VudF90eXBlIjoidXNlciIsInR0bCI6NjAwfX0.WI9vfN6SPNJwDR9uOaJ16quGzyKl-RWoaDwbOaQa1gpSQoutw8lBqsifzUb0WEJ9fqg8ZWAwbuu-IJikXTiwOiUqWy-09yHxNtCFpBARpY-jurMe20HbDCSlPGICpf8Bend-3tMSnf5c9JyuAgbVx1fnqSjhY3V7yiTVzCur-mOWqI47TiflDnddPscyQj7HBawwadinSiSwQKbnXw2FNkRIdKRrCEOaecKaZ-Hb69vHbi-A3D-HP80nhZzuQW8vzg0L_3cyGOh_Y-8qu22_21UfJwS_nWEizjrs9WTU5hCGpn2Da8U035gk01eC4S9J_WIhZjUhBRneB14QfgTNvg", + "expires_in": 1325391984000, + "created": 1536692280 + } + }, + { + "model": "designsafe_auth.tapisoauthtoken", + "pk": 2, + "fields": { + "user": 2, + "access_token": 
"eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJqdGkiOiJkMGU1YWZiZi05Yzk3LTQyOTMtOTNlMS1jYWIyYzAxY2JhMDAiLCJpc3MiOiJodHRwczovL2Rldi5kZXZlbG9wLnRhcGlzLmlvL3YzL3Rva2VucyIsInN1YiI6InRlc3R1c2VyMjAwQGRldiIsInRhcGlzL3RlbmFudF9pZCI6ImRldiIsInRhcGlzL3Rva2VuX3R5cGUiOiJhY2Nlc3MiLCJ0YXBpcy9kZWxlZ2F0aW9uIjpmYWxzZSwidGFwaXMvZGVsZWdhdGlvbl9zdWIiOm51bGwsInRhcGlzL3VzZXJuYW1lIjoidGVzdHVzZXIyMDAiLCJ0YXBpcy9hY2NvdW50X3R5cGUiOiJ1c2VyIiwiZXhwIjoxNjU2MDE5MzM1fQ.2mevJWnoS-nlUNfna17berL1HKCHKaPuX6BGi8RZQTQV2meFRLNhAu8B0nDJvROTqYiHna23N2h_FEgS51kRhpwL8N3zTuguh2cT090GxzCFw1QnI1V2rNK4zZjvxagciJxov8SbaOgta6H6_AUentKi_NFjpYTerPRjCDkuCwYitvGOJdzTUFY7cn8SX6JQvlRkcwQ7I0bfC5JN5m5Q0trPD5r2-VDIElI5JVY_isMMT9O5-lT1HTIN1BCYoOnLPgza6vkZeWdArsW9bcvpMANjDlK3mWFtc1fEybN6O3c9RaxRj8GO8zNoyngNH7h6DXeEGdsVJcrt9VWI-nW8iA", + "refresh_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJqdGkiOiIwYTYzNTAxOS1mNTllLTQxMjItOGUwNi0zZmRkYTNmMzYzNWEiLCJpc3MiOiJodHRwczovL2Rldi5kZXZlbG9wLnRhcGlzLmlvL3YzL3Rva2VucyIsInN1YiI6InRlc3R1c2VyMjAwQGRldiIsInRhcGlzL2luaXRpYWxfdHRsIjo2MDAsInRhcGlzL3RlbmFudF9pZCI6ImRldiIsInRhcGlzL3Rva2VuX3R5cGUiOiJyZWZyZXNoIiwiZXhwIjoxNjU2MDE5MzM1LCJ0YXBpcy9hY2Nlc3NfdG9rZW4iOnsianRpIjoiZDBlNWFmYmYtOWM5Ny00MjkzLTkzZTEtY2FiMmMwMWNiYTAwIiwiaXNzIjoiaHR0cHM6Ly9kZXYuZGV2ZWxvcC50YXBpcy5pby92My90b2tlbnMiLCJzdWIiOiJ0ZXN0dXNlcjIwMEBkZXYiLCJ0YXBpcy90ZW5hbnRfaWQiOiJkZXYiLCJ0YXBpcy90b2tlbl90eXBlIjoiYWNjZXNzIiwidGFwaXMvZGVsZWdhdGlvbiI6ZmFsc2UsInRhcGlzL2RlbGVnYXRpb25fc3ViIjpudWxsLCJ0YXBpcy91c2VybmFtZSI6InRlc3R1c2VyMjAwIiwidGFwaXMvYWNjb3VudF90eXBlIjoidXNlciIsInR0bCI6NjAwfX0.WI9vfN6SPNJwDR9uOaJ16quGzyKl-RWoaDwbOaQa1gpSQoutw8lBqsifzUb0WEJ9fqg8ZWAwbuu-IJikXTiwOiUqWy-09yHxNtCFpBARpY-jurMe20HbDCSlPGICpf8Bend-3tMSnf5c9JyuAgbVx1fnqSjhY3V7yiTVzCur-mOWqI47TiflDnddPscyQj7HBawwadinSiSwQKbnXw2FNkRIdKRrCEOaecKaZ-Hb69vHbi-A3D-HP80nhZzuQW8vzg0L_3cyGOh_Y-8qu22_21UfJwS_nWEizjrs9WTU5hCGpn2Da8U035gk01eC4S9J_WIhZjUhBRneB14QfgTNvg", + "expires_in": 1325391984000, + "created": 1536700041 + } + }, + { + "model": "designsafe_auth.tapisoauthtoken", + 
"pk": 3, + "fields": { + "user": 3, + "access_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJqdGkiOiJkMGU1YWZiZi05Yzk3LTQyOTMtOTNlMS1jYWIyYzAxY2JhMDAiLCJpc3MiOiJodHRwczovL2Rldi5kZXZlbG9wLnRhcGlzLmlvL3YzL3Rva2VucyIsInN1YiI6InRlc3R1c2VyMjAwQGRldiIsInRhcGlzL3RlbmFudF9pZCI6ImRldiIsInRhcGlzL3Rva2VuX3R5cGUiOiJhY2Nlc3MiLCJ0YXBpcy9kZWxlZ2F0aW9uIjpmYWxzZSwidGFwaXMvZGVsZWdhdGlvbl9zdWIiOm51bGwsInRhcGlzL3VzZXJuYW1lIjoidGVzdHVzZXIyMDAiLCJ0YXBpcy9hY2NvdW50X3R5cGUiOiJ1c2VyIiwiZXhwIjoxNjU2MDE5MzM1fQ.2mevJWnoS-nlUNfna17berL1HKCHKaPuX6BGi8RZQTQV2meFRLNhAu8B0nDJvROTqYiHna23N2h_FEgS51kRhpwL8N3zTuguh2cT090GxzCFw1QnI1V2rNK4zZjvxagciJxov8SbaOgta6H6_AUentKi_NFjpYTerPRjCDkuCwYitvGOJdzTUFY7cn8SX6JQvlRkcwQ7I0bfC5JN5m5Q0trPD5r2-VDIElI5JVY_isMMT9O5-lT1HTIN1BCYoOnLPgza6vkZeWdArsW9bcvpMANjDlK3mWFtc1fEybN6O3c9RaxRj8GO8zNoyngNH7h6DXeEGdsVJcrt9VWI-nW8iA", + "refresh_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJqdGkiOiIwYTYzNTAxOS1mNTllLTQxMjItOGUwNi0zZmRkYTNmMzYzNWEiLCJpc3MiOiJodHRwczovL2Rldi5kZXZlbG9wLnRhcGlzLmlvL3YzL3Rva2VucyIsInN1YiI6InRlc3R1c2VyMjAwQGRldiIsInRhcGlzL2luaXRpYWxfdHRsIjo2MDAsInRhcGlzL3RlbmFudF9pZCI6ImRldiIsInRhcGlzL3Rva2VuX3R5cGUiOiJyZWZyZXNoIiwiZXhwIjoxNjU2MDE5MzM1LCJ0YXBpcy9hY2Nlc3NfdG9rZW4iOnsianRpIjoiZDBlNWFmYmYtOWM5Ny00MjkzLTkzZTEtY2FiMmMwMWNiYTAwIiwiaXNzIjoiaHR0cHM6Ly9kZXYuZGV2ZWxvcC50YXBpcy5pby92My90b2tlbnMiLCJzdWIiOiJ0ZXN0dXNlcjIwMEBkZXYiLCJ0YXBpcy90ZW5hbnRfaWQiOiJkZXYiLCJ0YXBpcy90b2tlbl90eXBlIjoiYWNjZXNzIiwidGFwaXMvZGVsZWdhdGlvbiI6ZmFsc2UsInRhcGlzL2RlbGVnYXRpb25fc3ViIjpudWxsLCJ0YXBpcy91c2VybmFtZSI6InRlc3R1c2VyMjAwIiwidGFwaXMvYWNjb3VudF90eXBlIjoidXNlciIsInR0bCI6NjAwfX0.WI9vfN6SPNJwDR9uOaJ16quGzyKl-RWoaDwbOaQa1gpSQoutw8lBqsifzUb0WEJ9fqg8ZWAwbuu-IJikXTiwOiUqWy-09yHxNtCFpBARpY-jurMe20HbDCSlPGICpf8Bend-3tMSnf5c9JyuAgbVx1fnqSjhY3V7yiTVzCur-mOWqI47TiflDnddPscyQj7HBawwadinSiSwQKbnXw2FNkRIdKRrCEOaecKaZ-Hb69vHbi-A3D-HP80nhZzuQW8vzg0L_3cyGOh_Y-8qu22_21UfJwS_nWEizjrs9WTU5hCGpn2Da8U035gk01eC4S9J_WIhZjUhBRneB14QfgTNvg", + "expires_in": 1325391984000, + "created": 1536700084 + } + 
} +] diff --git a/designsafe/fixtures/tapis/auth/create-tokens-response.json b/designsafe/fixtures/tapis/auth/create-tokens-response.json new file mode 100644 index 0000000000..00191a6106 --- /dev/null +++ b/designsafe/fixtures/tapis/auth/create-tokens-response.json @@ -0,0 +1,20 @@ +{ + "message": "Token created successfully.", + "metadata": {}, + "result": { + "access_token": { + "access_token": "eyJhbGci---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------", + "expires_at": "2024-03-01T03:47:18.611914+00:00", + "expires_in": 14400, + "jti": "108792e6-2a77-41ad-964c-f289cc2198f7" + }, + "refresh_token": { + "expires_at": "2025-02-28T23:47:18.711146+00:00", + "expires_in": 31536000, + "jti": "69992b30-3b3b-477a-ba22-3bd2a8203791", + "refresh_token": 
"eyJhbGci---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------" + } + }, + "status": "success", + "version": "dev" +} diff --git a/designsafe/fixtures/user-data.json b/designsafe/fixtures/user-data.json new file mode 100644 index 0000000000..5d1cb1c5db --- /dev/null +++ b/designsafe/fixtures/user-data.json @@ -0,0 +1,103 @@ +[ + { + "fields": { + "username": "ds_admin", + "first_name": "DesignSafe", + "last_name": "Admin", + "is_active": true, + "is_superuser": true, + "is_staff": true, + "last_login": "2016-03-01T00:00:00.000Z", + "groups": [], + "user_permissions": [], + "password": "", + "email": "admin@designsafe-ci.org", + "date_joined": "2016-03-01T00:00:00.000Z" + }, + "model": "auth.user", + "pk": 1 + }, + { + "fields": { + "username": "envision", + "first_name": "DesignSafe", + "last_name": "Admin", + "is_active": true, + "is_superuser": true, + "is_staff": true, + "last_login": "2016-03-01T00:00:00.000Z", + "groups": [], + "user_permissions": [], + "password": "", + "email": "admin@designsafe-ci.org", + "date_joined": 
"2016-03-01T00:00:00.000Z" + }, + "model": "auth.user", + "pk": 3 + }, + { + "fields": { + "username": "ds_user", + "first_name": "DesignSafe", + "last_name": "User", + "is_active": true, + "is_superuser": false, + "is_staff": false, + "last_login": "2016-03-01T00:00:00.000Z", + "groups": [], + "user_permissions": [], + "password": "", + "email": "user@designsafe-ci.org", + "date_joined": "2016-03-01T00:00:00.000Z" + }, + "model": "auth.user", + "pk": 2 + }, + { + "fields": { + "user": 2, + "access_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJqdGkiOiJkMGU1YWZiZi05Yzk3LTQyOTMtOTNlMS1jYWIyYzAxY2JhMDAiLCJpc3MiOiJodHRwczovL2Rldi5kZXZlbG9wLnRhcGlzLmlvL3YzL3Rva2VucyIsInN1YiI6InRlc3R1c2VyMjAwQGRldiIsInRhcGlzL3RlbmFudF9pZCI6ImRldiIsInRhcGlzL3Rva2VuX3R5cGUiOiJhY2Nlc3MiLCJ0YXBpcy9kZWxlZ2F0aW9uIjpmYWxzZSwidGFwaXMvZGVsZWdhdGlvbl9zdWIiOm51bGwsInRhcGlzL3VzZXJuYW1lIjoidGVzdHVzZXIyMDAiLCJ0YXBpcy9hY2NvdW50X3R5cGUiOiJ1c2VyIiwiZXhwIjoxNjU2MDE5MzM1fQ.2mevJWnoS-nlUNfna17berL1HKCHKaPuX6BGi8RZQTQV2meFRLNhAu8B0nDJvROTqYiHna23N2h_FEgS51kRhpwL8N3zTuguh2cT090GxzCFw1QnI1V2rNK4zZjvxagciJxov8SbaOgta6H6_AUentKi_NFjpYTerPRjCDkuCwYitvGOJdzTUFY7cn8SX6JQvlRkcwQ7I0bfC5JN5m5Q0trPD5r2-VDIElI5JVY_isMMT9O5-lT1HTIN1BCYoOnLPgza6vkZeWdArsW9bcvpMANjDlK3mWFtc1fEybN6O3c9RaxRj8GO8zNoyngNH7h6DXeEGdsVJcrt9VWI-nW8iA", + "refresh_token": 
"eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJqdGkiOiIwYTYzNTAxOS1mNTllLTQxMjItOGUwNi0zZmRkYTNmMzYzNWEiLCJpc3MiOiJodHRwczovL2Rldi5kZXZlbG9wLnRhcGlzLmlvL3YzL3Rva2VucyIsInN1YiI6InRlc3R1c2VyMjAwQGRldiIsInRhcGlzL2luaXRpYWxfdHRsIjo2MDAsInRhcGlzL3RlbmFudF9pZCI6ImRldiIsInRhcGlzL3Rva2VuX3R5cGUiOiJyZWZyZXNoIiwiZXhwIjoxNjU2MDE5MzM1LCJ0YXBpcy9hY2Nlc3NfdG9rZW4iOnsianRpIjoiZDBlNWFmYmYtOWM5Ny00MjkzLTkzZTEtY2FiMmMwMWNiYTAwIiwiaXNzIjoiaHR0cHM6Ly9kZXYuZGV2ZWxvcC50YXBpcy5pby92My90b2tlbnMiLCJzdWIiOiJ0ZXN0dXNlcjIwMEBkZXYiLCJ0YXBpcy90ZW5hbnRfaWQiOiJkZXYiLCJ0YXBpcy90b2tlbl90eXBlIjoiYWNjZXNzIiwidGFwaXMvZGVsZWdhdGlvbiI6ZmFsc2UsInRhcGlzL2RlbGVnYXRpb25fc3ViIjpudWxsLCJ0YXBpcy91c2VybmFtZSI6InRlc3R1c2VyMjAwIiwidGFwaXMvYWNjb3VudF90eXBlIjoidXNlciIsInR0bCI6NjAwfX0.WI9vfN6SPNJwDR9uOaJ16quGzyKl-RWoaDwbOaQa1gpSQoutw8lBqsifzUb0WEJ9fqg8ZWAwbuu-IJikXTiwOiUqWy-09yHxNtCFpBARpY-jurMe20HbDCSlPGICpf8Bend-3tMSnf5c9JyuAgbVx1fnqSjhY3V7yiTVzCur-mOWqI47TiflDnddPscyQj7HBawwadinSiSwQKbnXw2FNkRIdKRrCEOaecKaZ-Hb69vHbi-A3D-HP80nhZzuQW8vzg0L_3cyGOh_Y-8qu22_21UfJwS_nWEizjrs9WTU5hCGpn2Da8U035gk01eC4S9J_WIhZjUhBRneB14QfgTNvg", + "expires_in": 1325391984000, + "created": 1536700084 + }, + "model": "designsafe_auth.tapisoauthtoken", + "pk": 1 + }, + { + "fields": { + "nickname": "Test Token", + "user": 2, + "created": "2016-09-06T00:00:00.000Z" + }, + "model": "token_access.token", + "pk": "5da84493fa0037de0945631d1f9df5c00cdcac49" + }, + { + "model": "designsafe_accounts.designsafeprofile", + "pk": 5610, + "fields": { + "user": 2, + "ethnicity": "Asian", + "gender": "Male", + "agree_to_account_limit": "2020-07-02T23:41:19.342Z", + "bio": null, + "website": null, + "orcid_id": null, + "professional_level": null, + "update_required": true, + "last_updated": "2020-07-02T23:41:19.343Z", + "nh_interests": [], + "nh_technical_domains": [], + "research_activities": [] + } + }, + { + "fields": { + "announcements": true, + "user": 2 + }, + "model": "designsafe_accounts.notificationpreferences", + "pk": 1 + } +] diff --git 
a/designsafe/libs/elasticsearch/docs.py b/designsafe/libs/elasticsearch/docs.py index 2dad2365d0..13e426ce41 100644 --- a/designsafe/libs/elasticsearch/docs.py +++ b/designsafe/libs/elasticsearch/docs.py @@ -3,7 +3,7 @@ :synopsis: Wrapper classes for ES different doc types. """ -from future.utils import python_2_unicode_compatible + import logging import json from django.conf import settings diff --git a/designsafe/libs/elasticsearch/docs/base.py b/designsafe/libs/elasticsearch/docs/base.py index c50d85f8f2..1b422da64a 100644 --- a/designsafe/libs/elasticsearch/docs/base.py +++ b/designsafe/libs/elasticsearch/docs/base.py @@ -1,8 +1,8 @@ -from future.utils import python_2_unicode_compatible + import logging -@python_2_unicode_compatible + class BaseESResource(object): """Base class used to represent an Elastic Search resource. @@ -14,7 +14,7 @@ class BaseESResource(object): """ def __init__(self, wrapped_doc=None, **kwargs): self._wrap(wrapped_doc, **kwargs) - + def to_dict(self): """Return wrapped doc as dict""" return self._wrapped.to_dict() @@ -32,7 +32,7 @@ def __getattr__(self, name): """ _wrapped = object.__getattribute__(self, '_wrapped') if _wrapped and hasattr(_wrapped, name): - return getattr(_wrapped, name) + return getattr(_wrapped, name) else: return object.__getattribute__(self, name) @@ -43,4 +43,4 @@ def __setattr__(self, name, value): return else: object.__setattr__(self, name, value) - return \ No newline at end of file + return diff --git a/designsafe/libs/elasticsearch/docs/files.py b/designsafe/libs/elasticsearch/docs/files.py index ffdb04e193..c61e585cea 100644 --- a/designsafe/libs/elasticsearch/docs/files.py +++ b/designsafe/libs/elasticsearch/docs/files.py @@ -3,7 +3,7 @@ :synopsis: Wrapper classes for ES ``files`` doc type. 
""" -from future.utils import python_2_unicode_compatible + import logging import os from django.conf import settings @@ -15,7 +15,7 @@ logger = logging.getLogger(__name__) #pylint: enable=invalid-name -@python_2_unicode_compatible + class BaseESFile(BaseESResource): """Wrapper class for Elastic Search indexed file. @@ -61,25 +61,25 @@ def _index_cls(cls, reindex): def children(self, limit=100): """ - Yield all children (i.e. documents whose basePath matches self.path) by + Yield all children (i.e. documents whose basePath matches self.path) by paginating with the search_after api. """ res, search_after = self._index_cls(self._reindex).children( self.username, self.system, - self.path, + self.path, limit=limit) for doc in res: yield BaseESFile(self.username, wrapped_doc=doc) while not len(res) < limit: # If the number or results doesn't match the limit, we're done paginating. - # Retrieve the sort key from the last element then use + # Retrieve the sort key from the last element then use # search_after to get the next page of results res, search_after = self._index_cls(self._reindex).children( self.username, self.system, - self.path, + self.path, limit=limit, search_after=search_after) for doc in res: @@ -101,4 +101,4 @@ def delete(self): for child in children: if child.path != self.path: child.delete() - self._wrapped.delete() \ No newline at end of file + self._wrapped.delete() diff --git a/designsafe/libs/elasticsearch/docs/publication_legacy.py b/designsafe/libs/elasticsearch/docs/publication_legacy.py index 1255100ff2..80185aa833 100644 --- a/designsafe/libs/elasticsearch/docs/publication_legacy.py +++ b/designsafe/libs/elasticsearch/docs/publication_legacy.py @@ -3,7 +3,7 @@ :synopsis: Wrapper classes for ES ``files`` doc type. 
""" -from future.utils import python_2_unicode_compatible + import logging import os import zipfile @@ -17,7 +17,7 @@ logger = logging.getLogger(__name__) #pylint: enable=invalid-name -@python_2_unicode_compatible + class BaseESPublicationLegacy(BaseESResource): """Wrapper class for Elastic Search indexed NEES publication. @@ -67,10 +67,10 @@ def to_file(self): publication_dict = self.to_dict() project_dict = {} - for key in ['deleted', 'description', 'endDate', 'facility', 'name', + for key in ['deleted', 'description', 'endDate', 'facility', 'name', 'organization', 'pis', 'project', 'projectPath', 'publications', 'startDate', 'system', 'title', 'sponsor']: - + if key in publication_dict: project_dict[key] = publication_dict[key] @@ -97,5 +97,5 @@ def to_file(self): 'experiments': experiments, 'project': project_dict }} - + return dict_obj diff --git a/designsafe/libs/elasticsearch/docs/publications.py b/designsafe/libs/elasticsearch/docs/publications.py index 396eb1a32d..e822f11487 100644 --- a/designsafe/libs/elasticsearch/docs/publications.py +++ b/designsafe/libs/elasticsearch/docs/publications.py @@ -5,7 +5,7 @@ """ import logging -from future.utils import python_2_unicode_compatible + from designsafe.apps.data.models.elasticsearch import IndexedPublication from designsafe.libs.elasticsearch.docs.base import BaseESResource from designsafe.libs.elasticsearch.exceptions import DocumentNotFound @@ -16,7 +16,7 @@ # pylint: enable=invalid-name -@python_2_unicode_compatible + class BaseESPublication(BaseESResource): """Wrapper class for Elastic Search indexed publication. 
@@ -157,7 +157,7 @@ def to_file(self): except: dict_obj['meta']['piLabel'] = '({pi})'.format(pi=pi) return dict_obj - + def entity_keys(self, publishable=False): """Type specific keys for publication""" diff --git a/designsafe/libs/elasticsearch/indices.py b/designsafe/libs/elasticsearch/indices.py index 8e51a1ba3d..2ae93abe64 100644 --- a/designsafe/libs/elasticsearch/indices.py +++ b/designsafe/libs/elasticsearch/indices.py @@ -3,7 +3,7 @@ :synopsis: Wrapper classes for ES different doc types. """ -from future.utils import python_2_unicode_compatible + import logging import json import six diff --git a/designsafe/libs/elasticsearch/utils.py b/designsafe/libs/elasticsearch/utils.py index c97412663f..3504460691 100644 --- a/designsafe/libs/elasticsearch/utils.py +++ b/designsafe/libs/elasticsearch/utils.py @@ -1,4 +1,4 @@ -from future.utils import python_2_unicode_compatible + import urllib.request, urllib.parse, urllib.error from elasticsearch import Elasticsearch import logging @@ -154,10 +154,24 @@ def iterate_level(client, system, path, limit=100): offset = 0 while True: - page = client.files.list(systemId=system, - filePath=urllib.parse.quote(path), + _page = client.files.listFiles(systemId=system, + path=urllib.parse.quote(path), offset=offset, limit=limit) + + page = [{ + 'system': system, + 'type': 'dir' if f.type == 'dir' else 'file', + 'format': 'folder' if f.type == 'dir' else 'raw', + 'mimeType': f.mimeType, + 'path': f"/{f.path}", + 'name': f.name, + 'length': f.size, + 'lastModified': f.lastModified, + '_links': { + 'self': {'href': f.url} + }} for f in _page] + yield from page offset += limit if len(page) != limit: @@ -165,7 +179,7 @@ def iterate_level(client, system, path, limit=100): break # pylint: disable=too-many-locals -@python_2_unicode_compatible + def walk_levels(client, system, path, bottom_up=False, ignore_hidden=False, paths_to_ignore=None): """Walk a pth in an Agave storgae system. 
@@ -298,14 +312,14 @@ def index_level(path, folders, files, systemId, reindex=False): logger.debug(children_paths) delete_recursive(hit.system, hit.path) -@python_2_unicode_compatible + def repair_path(name, path): if not path.endswith(name): path = path + '/' + name path = path.strip('/') return '/{path}'.format(path=path) -@python_2_unicode_compatible + def repair_paths(limit=1000): from designsafe.apps.data.models.elasticsearch import IndexedFile from elasticsearch import Elasticsearch diff --git a/designsafe/libs/fedora/fedora_operations.py b/designsafe/libs/fedora/fedora_operations.py index a2eeb82d94..7313182d95 100644 --- a/designsafe/libs/fedora/fedora_operations.py +++ b/designsafe/libs/fedora/fedora_operations.py @@ -28,6 +28,9 @@ "abstract": { "@id": "http://purl.org/dc/elements/1.1/abstract" }, + "accessRights": { + "@id": "http://purl.org/dc/elements/1.1/accessRights" + }, "available": { "@id": "http://purl.org/dc/elements/1.1/available" }, diff --git a/designsafe/libs/tapis/serializers.py b/designsafe/libs/tapis/serializers.py new file mode 100644 index 0000000000..51c31874df --- /dev/null +++ b/designsafe/libs/tapis/serializers.py @@ -0,0 +1,41 @@ +""" +.. module: libs.tapis.serializers + :synopsis: Serialize a Tapis object into a dict. 
+""" + +import logging +import json +from tapipy.tapis import TapisResult + +logger = logging.getLogger(__name__) + + +class BaseTapisResultSerializer(json.JSONEncoder): + """Class to serialize a Tapis response object""" + + def _serialize(self, obj): + if isinstance(obj, TapisResult): + _wrapped = vars(obj) + for key, value in _wrapped.items(): + if isinstance(value, TapisResult): + _wrapped[key] = self._serialize(value) + elif isinstance(value, list): + for index, item in enumerate(value): + value[index] = self._serialize(item) + elif isinstance(value, dict): + for n_key, n_value in value.items(): + value[n_key] = self._serialize(n_value) + return _wrapped + + if isinstance(obj, list): + for index, item in enumerate(obj): + obj[index] = self._serialize(item) + elif isinstance(obj, dict): + for key, value in obj.items(): + obj[key] = self._serialize(value) + return obj + + def default(self, o): + if isinstance(o, (TapisResult, list, dict)): + return self._serialize(o) + return super().default(o) diff --git a/designsafe/settings/celery_settings.py b/designsafe/settings/celery_settings.py index a374b7984c..feb8d51b96 100644 --- a/designsafe/settings/celery_settings.py +++ b/designsafe/settings/celery_settings.py @@ -42,6 +42,8 @@ Queue('files', Exchange('io'), routing_key='io.files'), #Use to queue tasks which mainly call external APIs Queue('api', Exchange('api'), routing_key='api.agave'), + # Use to queue tasks which handle user onboarding + Queue('onboarding', Exchange('onboarding'), routing_key='onboarding'), ) CELERY_TASK_DEFAULT_QUEUE = 'default' CELERY_TASK_DEFAULT_EXCHANGE = 'default' diff --git a/designsafe/settings/common_settings.py b/designsafe/settings/common_settings.py index e37b4cd889..c7be0549f1 100644 --- a/designsafe/settings/common_settings.py +++ b/designsafe/settings/common_settings.py @@ -117,12 +117,12 @@ ) AUTHENTICATION_BACKENDS = ( - 'designsafe.apps.auth.backends.AgaveOAuthBackend', + 'designsafe.apps.auth.backends.TapisOAuthBackend', 
'designsafe.apps.auth.backends.TASBackend', 'django.contrib.auth.backends.ModelBackend', ) -LOGIN_REDIRECT_URL = os.environ.get('LOGIN_REDIRECT_URL', '/account/') +LOGIN_REDIRECT_URL = os.environ.get('LOGIN_REDIRECT_URL', '/dashboard/') LOGOUT_REDIRECT_URL = os.environ.get('LOGOUT_REDIRECT_URL', '/auth/logged-out/') CACHES = { @@ -140,7 +140,7 @@ 'django.contrib.auth.middleware.AuthenticationMiddleware', 'designsafe.apps.token_access.middleware.TokenAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', - 'designsafe.apps.auth.middleware.AgaveTokenRefreshMiddleware', + 'designsafe.apps.auth.middleware.TapisTokenRefreshMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.locale.LocaleMiddleware', 'django.middleware.security.SecurityMiddleware', @@ -179,7 +179,6 @@ 'designsafe.context_processors.site_verification', 'designsafe.context_processors.debug', 'designsafe.context_processors.messages', - 'designsafe.apps.auth.context_processors.auth', 'designsafe.apps.cms_plugins.context_processors.cms_section', ], }, @@ -203,7 +202,6 @@ # https://docs.djangoproject.com/en/1.8/ref/settings/#databases if os.environ.get('DATABASE_HOST'): - # mysql connection DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', @@ -231,7 +229,7 @@ ALDRYN_SEARCH_REGISTER_APPHOOK = True from designsafe.settings.nees_settings import NEES_USER_DATABASE -#if NEES_USER_DATABASE['NAME']: +# if NEES_USER_DATABASE['NAME']: # DATABASES['nees_users'] = NEES_USER_DATABASE @@ -348,13 +346,13 @@ 'allowedContent': True } -#MIGRATION_MODULES = { +# MIGRATION_MODULES = { # 'djangocms_file': 'djangocms_file.migrations_django', # 'djangocms_googlemap': 'djangocms_googlemap.migrations_django', # 'djangocms_picture': 'djangocms_picture.migrations_django', # 'djangocms_video': 'djangocms_video.migrations_django', # 'djangocms_style': 'djangocms_style.migrations_django', -#} +# } LOGIN_URL = os.environ.get('LOGIN_URL', '/login/') @@ 
-497,6 +495,19 @@ TRAM_SERVICES_KEY = os.environ.get('TRAM_SERVICES_KEY', None) TRAM_PROJECT_ID = os.environ.get('TRAM_PROJECT_ID', None) +TAS_CLIENT_KEY = os.environ.get('TAS_CLIENT_KEY', None) +TAS_CLIENT_SECRET = os.environ.get('TAS_CLIENT_SECRET', None) +TAS_URL = os.environ.get('TAS_URL', None) + +# Allocations to exclude +# +ALLOCATIONS_TO_EXCLUDE = ( + os.environ.get("ALLOCATIONS_TO_EXCLUDE", "").split(",") + if os.environ.get("ALLOCATIONS_TO_EXCLUDE") + else ["DesignSafe-DCV"] +) + + ### # Agave Integration # @@ -530,6 +541,26 @@ AGAVE_USER_STORE_ID = os.environ.get('AGAVE_USER_STORE_ID', 'TACC') AGAVE_USE_SANDBOX = os.environ.get('AGAVE_USE_SANDBOX', 'False').lower() == 'true' +TAPIS_SYSTEMS_TO_CONFIGURE = [ + {"system_id": AGAVE_STORAGE_SYSTEM, "path": "{username}", "create_path": True}, + {"system_id": AGAVE_WORKING_SYSTEM, "path": "{username}", "create_path": True}, + {"system_id": "cloud.data", "path": "/ ", "create_path": False}, +] + +# Tapis Client Configuration +PORTAL_ADMIN_USERNAME = os.environ.get('PORTAL_ADMIN_USERNAME') +TAPIS_TENANT_BASEURL = os.environ.get('TAPIS_TENANT_BASEURL') +TAPIS_CLIENT_ID = os.environ.get('TAPIS_CLIENT_ID') +TAPIS_CLIENT_KEY = os.environ.get('TAPIS_CLIENT_KEY') +TAPIS_ADMIN_JWT = os.environ.get('TAPIS_ADMIN_JWT') +TAPIS_TG458981_JWT = os.environ.get('TAPIS_TG458981_JWT') + +KEY_SERVICE_TOKEN = os.environ.get('KEY_SERVICE_TOKEN') + +PORTAL_NAMESPACE = 'DESIGNSAFE' + +PORTAL_JOB_NOTIFICATION_STATES = ["PENDING", "STAGING_INPUTS", "RUNNING", "ARCHIVING", "BLOCKED", "PAUSED", "FINISHED", "CANCELLED", "FAILED"] + DS_ADMIN_USERNAME = os.environ.get('DS_ADMIN_USERNAME') DS_ADMIN_PASSWORD = os.environ.get('DS_ADMIN_PASSWORD') @@ -557,6 +588,8 @@ } } +PROJECT_STORAGE_SYSTEM_CREDENTIALS = json.loads(os.environ.get('PROJECT_SYSTEM_STORAGE_CREDENTIALS', '{}')) + PUBLISHED_SYSTEM = 'designsafe.storage.published' COMMUNITY_SYSTEM = 'designsafe.storage.community' NEES_PUBLIC_SYSTEM = 'nees.public' @@ -568,7 +601,7 @@ 
RECAPTCHA_PRIVATE_KEY= os.environ.get('DJANGOCMS_FORMS_RECAPTCHA_SECRET_KEY') NOCAPTCHA = True -#FOR RAPID UPLOADS +# FOR RAPID UPLOADS DESIGNSAFE_UPLOAD_PATH = '/corral-repl/tacc/NHERI/uploads' DESIGNSAFE_PROJECTS_PATH = os.environ.get('DESIGNSAFE_PROJECTS_PATH', '/corral-repl/tacc/NHERI/projects/') DESIGNSAFE_PUBLISHED_PATH = os.environ.get('DESIGNSAFE_PUBLISHED_PATH', '/corral-repl/tacc/NHERI/published/') @@ -669,3 +702,5 @@ FEDORA_USERNAME = os.environ.get('FEDORA_USERNAME') FEDORA_PASSWORD = os.environ.get('FEDORA_PASSWORD') FEDORA_CONTAINER= os.environ.get('FEDORA_CONTAINER', 'designsafe-publications-dev') + +CSRF_TRUSTED_ORIGINS = [f"https://{os.environ.get('SESSION_COOKIE_DOMAIN')}"] diff --git a/designsafe/settings/elasticsearch_settings.py b/designsafe/settings/elasticsearch_settings.py index 4d04f8d688..ab3613d66a 100644 --- a/designsafe/settings/elasticsearch_settings.py +++ b/designsafe/settings/elasticsearch_settings.py @@ -43,6 +43,11 @@ 'document': 'designsafe.apps.data.models.elasticsearch.IndexedPublication', 'kwargs': {'index.mapping.total_fields.limit': 3000} }, + 'publications_v2': { + 'alias': ES_INDEX_PREFIX.format('publications_v2'), + 'document': 'designsafe.apps.api.publications_v2.elasticsearch.IndexedPublication', + 'kwargs': {} + }, 'web_content': { 'alias': ES_INDEX_PREFIX.format('web-content'), 'document': 'designsafe.apps.data.models.elasticsearch.IndexedCMSPage', diff --git a/designsafe/settings/external_resource_settings.py b/designsafe/settings/external_resource_settings.py index 79785be4c1..02fdb17164 100644 --- a/designsafe/settings/external_resource_settings.py +++ b/designsafe/settings/external_resource_settings.py @@ -26,3 +26,5 @@ 'user_property': 'user_id', 'credentials_property': 'credential' } +GOOGLE_OAUTH2_CLIENT_SECRET = os.environ.get("GOOGLE_OAUTH2_CLIENT_SECRET", "CHANGE_ME") +GOOGLE_OAUTH2_CLIENT_ID = os.environ.get("GOOGLE_OAUTH2_CLIENT_ID", "CHANGE_ME") \ No newline at end of file diff --git 
a/designsafe/settings/test_settings.py b/designsafe/settings/test_settings.py index e2b3a73388..d7126fe6a6 100644 --- a/designsafe/settings/test_settings.py +++ b/designsafe/settings/test_settings.py @@ -133,7 +133,7 @@ 'django.contrib.sessions.middleware.SessionMiddleware', 'designsafe.apps.token_access.middleware.TokenAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', - 'designsafe.apps.auth.middleware.AgaveTokenRefreshMiddleware', + 'designsafe.apps.auth.middleware.TapisTokenRefreshMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.locale.LocaleMiddleware', 'django.middleware.security.SecurityMiddleware', @@ -169,7 +169,6 @@ 'designsafe.context_processors.site_verification', 'designsafe.context_processors.debug', 'designsafe.context_processors.messages', - 'designsafe.apps.auth.context_processors.auth', 'designsafe.apps.cms_plugins.context_processors.cms_section', ], }, @@ -237,6 +236,10 @@ MEDIA_ROOT = '/srv/www/designsafe/media/' MEDIA_URL = '/media/' +FIXTURE_DIRS = [ + os.path.join(BASE_DIR, 'designsafe', 'fixtures'), +] + ##### # @@ -447,6 +450,7 @@ AGAVE_TOKEN_SESSION_ID = os.environ.get('AGAVE_TOKEN_SESSION_ID', 'agave_token') AGAVE_SUPER_TOKEN = os.environ.get('AGAVE_SUPER_TOKEN') AGAVE_STORAGE_SYSTEM = os.environ.get('AGAVE_STORAGE_SYSTEM') +AGAVE_WORKING_SYSTEM = os.environ.get('AGAVE_WORKING_SYSTEM') AGAVE_JWT_PUBKEY = os.environ.get('AGAVE_JWT_PUBKEY') AGAVE_JWT_ISSUER = os.environ.get('AGAVE_JWT_ISSUER') @@ -531,7 +535,7 @@ # No token refreshes during testing MIDDLEWARE= [c for c in MIDDLEWARE if c != - 'designsafe.apps.auth.middleware.AgaveTokenRefreshMiddleware'] + 'designsafe.apps.auth.middleware.TapisTokenRefreshMiddleware'] STATIC_ROOT = os.path.join(BASE_DIR, 'static') MEDIA_ROOT = os.path.join(BASE_DIR, '.media') @@ -543,6 +547,23 @@ AGAVE_CLIENT_SECRET = 'example_com_client_secret' AGAVE_SUPER_TOKEN = 'example_com_client_token' AGAVE_STORAGE_SYSTEM = 'storage.example.com' 
+AGAVE_WORKING_SYSTEM = 'storage.example.work' + +TAPIS_SYSTEMS_TO_CONFIGURE = [ + {"system_id": AGAVE_STORAGE_SYSTEM, "path": "{username}", "create_path": True}, + {"system_id": AGAVE_WORKING_SYSTEM, "path": "{username}", "create_path": True}, + {"system_id": "cloud.data", "path": "/ ", "create_path": False}, +] + +# Tapis Client Configuration +PORTAL_ADMIN_USERNAME = '' +TAPIS_TENANT_BASEURL = 'https://designsafe.tapis.io' +TAPIS_CLIENT_ID = 'client_id' +TAPIS_CLIENT_KEY = 'client_key' +TAPIS_ADMIN_JWT = 'admin_jwt' +TAPIS_TG458981_JWT = 'tg_jwt' + +KEY_SERVICE_TOKEN = '' MIGRATION_MODULES = { 'data': None, @@ -671,6 +692,11 @@ 'alias': ES_INDEX_PREFIX.format('publications'), 'document': 'designsafe.apps.data.models.elasticsearch.IndexedPublication', 'kwargs': {'index.mapping.total_fields.limit': 3000} + }, + 'publications_v2': { + 'alias': ES_INDEX_PREFIX.format('publications_v2'), + 'document': 'designsafe.apps.api.publications_v2.elasticsearch.IndexedPublication', + 'kwargs': {} }, 'web_content': { 'alias': ES_INDEX_PREFIX.format('web-content'), diff --git a/designsafe/sitemaps.py b/designsafe/sitemaps.py index 39bc9d1b1d..ee02d2b6b7 100644 --- a/designsafe/sitemaps.py +++ b/designsafe/sitemaps.py @@ -37,6 +37,7 @@ from django.urls import reverse from designsafe.apps.api.publications.operations import listing as list_publications, neeslisting as list_nees from designsafe.apps.api.agave import get_service_account_client +from designsafe.apps.api.publications_v2.models import Publication # imported urlpatterns from apps from designsafe import urls # from designsafe import urls not working? 
@@ -168,23 +169,19 @@ def get_urls(self, site=None, **kwargs): return super(ProjectSitemap, self).get_urls(site=site, **kwargs) def items(self): - client = get_service_account_client() projPath = [] # pefm - PublicElasticFileManager to grab public projects - count = 0 - while True: - projects = list_publications(offset=count, limit=200, limit_fields=False) - for proj in projects['listing']: - subpath = { - 'root' : reverse('designsafe_data:data_depot'), - 'project' : proj['project']['value']['projectId'], - 'system' : 'designsafe.storage.published' - } - projPath.append('{root}public/{system}/{project}'.format(**subpath)) - if len(projects['listing']) < 200: - break - count += 200 + + projects = Publication.objects.all() + for proj in projects: + subpath = { + 'root' : reverse('designsafe_data:data_depot'), + 'project' : proj.project_id, + 'system' : 'designsafe.storage.published' + } + projPath.append('{root}public/{system}/{project}'.format(**subpath)) + count = 0 while True: diff --git a/designsafe/static/scripts/dashboard/components/dashboard/dashboard.component.html b/designsafe/static/scripts/dashboard/components/dashboard/dashboard.component.html index 26e0d6cc2d..45340a215d 100644 --- a/designsafe/static/scripts/dashboard/components/dashboard/dashboard.component.html +++ b/designsafe/static/scripts/dashboard/components/dashboard/dashboard.component.html @@ -1,5 +1,5 @@
    -
    + +
    @@ -33,7 +33,7 @@
    -
    +
    Quick Links
    @@ -47,7 +47,7 @@
    -
    + -
    +
    +
    +
    +

    My Tickets

    + Create New +
    + + +
    + +
    +
    +
    +

    Notifications {{$ctrl.notification_count}}

    @@ -104,23 +121,8 @@

    Notifications {{$ctrl.

    -
    -
    -

    My Tickets

    - Create New -
    - -
    - -
    -
    - -
    +
    diff --git a/designsafe/static/scripts/ng-designsafe/components/notification-badge/notification-badge.component.html b/designsafe/static/scripts/ng-designsafe/components/notification-badge/notification-badge.component.html index 78c64a3402..bd706a3cba 100644 --- a/designsafe/static/scripts/ng-designsafe/components/notification-badge/notification-badge.component.html +++ b/designsafe/static/scripts/ng-designsafe/components/notification-badge/notification-badge.component.html @@ -9,7 +9,7 @@
    - \ No newline at end of file diff --git a/designsafe/static/scripts/ng-designsafe/controllers/notifications.js b/designsafe/static/scripts/ng-designsafe/controllers/notifications.js index b0c3b6477c..0fd8875307 100644 --- a/designsafe/static/scripts/ng-designsafe/controllers/notifications.js +++ b/designsafe/static/scripts/ng-designsafe/controllers/notifications.js @@ -40,12 +40,13 @@ export function NotificationBadgeCtrl( $scope.data.unread = 0; } - for (var i=0; i < $scope.data.notifications.length; i++){ - if ($scope.data.notifications[i]['event_type'] == 'job') { - $scope.data.notifications[i]['action_link']=$scope.data.notifications[i]['action_link']=`/rw/workspace/notification/process/${$scope.data.notifications[i]['pk']}`; - } else if ($scope.data.notifications[i]['event_type'] == 'data_depot') { - $scope.data.notifications[i]['action_link']=$scope.data.notifications[i]['action_link']=`/rw/workspace/notification/process/${$scope.data.notifications[i]['pk']}`; - } + for (var i = 0; i < $scope.data.notifications.length; i++) { + const notification = $scope.data.notifications[i]; + if (notification['event_type'] == 'job') { + notification['action_link'] = `/rw/workspace/history`; + } else if (notification['event_type'] == 'data_depot') { + notification['action_link'] = `/data/browser`; + } } }); }; diff --git a/designsafe/static/scripts/ng-designsafe/providers/notifications-provider.js b/designsafe/static/scripts/ng-designsafe/providers/notifications-provider.js index f20142cd4c..a320a02a27 100644 --- a/designsafe/static/scripts/ng-designsafe/providers/notifications-provider.js +++ b/designsafe/static/scripts/ng-designsafe/providers/notifications-provider.js @@ -25,7 +25,6 @@ function NotificationService( * @return {string} url */ function renderLink(msg) { - console.log('rendering link?') const eventType = msg.event_type.toLowerCase(); let url = ''; if (typeof processors[eventType] !== 'undefined' && @@ -34,10 +33,10 @@ function NotificationService( url = 
processors[eventType].renderLink(msg); } if (msg.status != 'ERROR') { - if (msg.event_type == 'job') { - url=`/rw/workspace/notification/process/${msg.pk}` + if (msg.event_type === 'job') { + url = `/rw/workspace/history`; } else if (msg.event_type == 'data_depot') { - url=`/rw/workspace/notification/process/${msg.pk}` + url = `/data/browser`; } } return url; @@ -120,6 +119,10 @@ function NotificationService( * @param {Object} msg */ function processToastr(e, msg) { + if (msg.event_type === 'job' || msg.event_type ==='WEB' || msg.event_type === 'interactive_session_ready') { + return; + } + try { // msg.extra = JSON.parse(msg.extra); msg.extra = (typeof msg.extra === 'string') ? JSON.parse(msg.extra) : msg.extra; diff --git a/designsafe/static/scripts/notifications/app.js b/designsafe/static/scripts/notifications/app.js index 6c4b1964d0..f2d025ff0b 100644 --- a/designsafe/static/scripts/notifications/app.js +++ b/designsafe/static/scripts/notifications/app.js @@ -31,14 +31,12 @@ angular.module('designsafe').controller('NotificationListCtrl', ['$scope','$root $scope.data.pagination.total = resp.total; $scope.data.notifications = resp.notifs; - for (var i=0; i < $scope.data.notifications.length; i++){ - // $scope.data.notifications[i] = angular.fromJson($scope.data.notifications[i]); - // $scope.data.notifications[i]['fields']['extra'] = angular.fromJson($scope.data.notifications[i]['fields']['extra']); - // $scope.data.notifications[i]['datetime'] = Date($scope.data.notifications[i]['datetime']); - if ($scope.data.notifications[i]['event_type'] == 'job') { - $scope.data.notifications[i]['action_link']=`/rw/workspace/notification/process/${$scope.data.notifications[i]['pk']}`; - } else if ($scope.data.notifications[i]['event_type'] == 'data_depot') { - $scope.data.notifications[i]['action_link']=`/rw/workspace/notification/process/${$scope.data.notifications[i]['pk']}`; + for (var i = 0; i < $scope.data.notifications.length; i++) { + const notification = 
$scope.data.notifications[i]; + if (notification['event_type'] == 'job') { + notification['action_link'] = `/rw/workspace/history`; + } else if (notification['event_type'] == 'data_depot') { + notification['action_link'] = '/data/browser'; } } diff --git a/designsafe/static/styles/main.css b/designsafe/static/styles/main.css index 55244fa236..131c1d87b8 100644 --- a/designsafe/static/styles/main.css +++ b/designsafe/static/styles/main.css @@ -944,7 +944,7 @@ li .popover.right { flex-direction: column; flex-grow: 1; flex-shrink: 0; - flex-basis: auto; + flex-basis: 0; } .o-site__body > .container-fluid { diff --git a/designsafe/static/styles/ng-designsafe.css b/designsafe/static/styles/ng-designsafe.css index b555c25527..94ac89369b 100644 --- a/designsafe/static/styles/ng-designsafe.css +++ b/designsafe/static/styles/ng-designsafe.css @@ -1074,3 +1074,8 @@ i[class^="icon-ls-pre/post"]:before, :root { color-scheme: only light !important; } + +.html-app-container { + padding: 30px !important; + margin-top: 20px !important; +} diff --git a/designsafe/static/styles/variables.css b/designsafe/static/styles/variables.css index 6cb9e46c01..9c9c81e540 100644 --- a/designsafe/static/styles/variables.css +++ b/designsafe/static/styles/variables.css @@ -9,8 +9,8 @@ --global-color-primary--light: #c6c6c6; --global-color-primary--normal: #afafaf; --global-color-primary--dark: #707070; - --global-color-primary--x-dark: #484848; /* ¹ */ - --global-color-primary--xx-dark: #222222; /* ¹ */ + --global-color-primary--x-dark: #484848; + --global-color-primary--xx-dark: #222222; /* Space */ --global-space--above-breadcrumbs: 35px; diff --git a/designsafe/static/vendor/bootstrap-ds/css/bootstrap.css b/designsafe/static/vendor/bootstrap-ds/css/bootstrap.css index b389ce5759..f743a452a2 100755 --- a/designsafe/static/vendor/bootstrap-ds/css/bootstrap.css +++ b/designsafe/static/vendor/bootstrap-ds/css/bootstrap.css @@ -2853,7 +2853,7 @@ input[type="search"] { .pub-info-modal-label { 
vertical-align: top; display: inline-block; - width:30%; + width:40%; } .pub-info-modal-heading { border-bottom: darkgrey; @@ -2863,7 +2863,7 @@ input[type="search"] { } .pub-info-modal-data { display: inline-block; - width: 68%; + width: 60%; font-weight:bold; } .pub-info-modal-body { diff --git a/designsafe/templates/base.j2 b/designsafe/templates/base.j2 index 664eaf05f5..d1c2875e4c 100644 --- a/designsafe/templates/base.j2 +++ b/designsafe/templates/base.j2 @@ -38,6 +38,7 @@ {% block styles %}{% endblock %} {% render_block "css" %} + {% render_block "react_assets" %} {% recaptcha_init 'en' %} @@ -147,6 +148,7 @@ firstName: "{{ request.user.first_name }}", lastName: "{{ request.user.last_name }}", email: "{{ request.user.email }}", + institution: "{{ request.user.profile.institution }}" }; diff --git a/designsafe/templates/includes/header.html b/designsafe/templates/includes/header.html index 78bf801c58..9a7df976d6 100644 --- a/designsafe/templates/includes/header.html +++ b/designsafe/templates/includes/header.html @@ -36,15 +36,7 @@ {% if user.is_authenticated %} - - {% if not agave_ready %} -   - API Session Not Available. Click for details. - {% endif %} - - +
    Welcome, {{ user.first_name }}!