mirror of
https://github.com/aykhans/AzSuicideDataVisualization.git
synced 2025-07-03 22:57:06 +00:00
first commit
This commit is contained in:
@ -0,0 +1,142 @@
|
||||
"""
|
||||
Classes for managing Checkpoints.
|
||||
"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from tornado.web import HTTPError
|
||||
|
||||
from traitlets.config.configurable import LoggingConfigurable
|
||||
|
||||
|
||||
class Checkpoints(LoggingConfigurable):
    """
    Base class for managing checkpoints for a ContentsManager.

    Subclasses are required to implement:

    create_checkpoint(self, contents_mgr, path)
    restore_checkpoint(self, contents_mgr, checkpoint_id, path)
    rename_checkpoint(self, checkpoint_id, old_path, new_path)
    delete_checkpoint(self, checkpoint_id, path)
    list_checkpoints(self, path)
    """

    def create_checkpoint(self, contents_mgr, path):
        """Create a checkpoint."""
        raise NotImplementedError("must be implemented in a subclass")

    def restore_checkpoint(self, contents_mgr, checkpoint_id, path):
        """Restore a checkpoint"""
        raise NotImplementedError("must be implemented in a subclass")

    def rename_checkpoint(self, checkpoint_id, old_path, new_path):
        """Rename a single checkpoint from old_path to new_path."""
        raise NotImplementedError("must be implemented in a subclass")

    def delete_checkpoint(self, checkpoint_id, path):
        """delete a checkpoint for a file"""
        raise NotImplementedError("must be implemented in a subclass")

    def list_checkpoints(self, path):
        """Return a list of checkpoints for a given file"""
        raise NotImplementedError("must be implemented in a subclass")

    def rename_all_checkpoints(self, old_path, new_path):
        """Rename all checkpoints for old_path to new_path."""
        # Delegates per-checkpoint work to the subclass implementation.
        for checkpoint in self.list_checkpoints(old_path):
            self.rename_checkpoint(checkpoint['id'], old_path, new_path)

    def delete_all_checkpoints(self, path):
        """Delete all checkpoints for the given path."""
        for checkpoint in self.list_checkpoints(path):
            self.delete_checkpoint(checkpoint['id'], path)
|
||||
|
||||
|
||||
class GenericCheckpointsMixin:
    """
    Helper for creating Checkpoints subclasses that can be used with any
    ContentsManager.

    Provides a ContentsManager-agnostic implementation of `create_checkpoint`
    and `restore_checkpoint` in terms of the following operations:

    - create_file_checkpoint(self, content, format, path)
    - create_notebook_checkpoint(self, nb, path)
    - get_file_checkpoint(self, checkpoint_id, path)
    - get_notebook_checkpoint(self, checkpoint_id, path)

    To create a generic CheckpointManager, add this mixin to a class that
    implement the above four methods plus the remaining Checkpoints API
    methods:

    - delete_checkpoint(self, checkpoint_id, path)
    - list_checkpoints(self, path)
    - rename_checkpoint(self, checkpoint_id, old_path, new_path)
    """

    def create_checkpoint(self, contents_mgr, path):
        """Create a checkpoint, dispatching on the model's type."""
        model = contents_mgr.get(path, content=True)
        model_type = model['type']
        if model_type == 'notebook':
            return self.create_notebook_checkpoint(
                model['content'],
                path,
            )
        if model_type == 'file':
            return self.create_file_checkpoint(
                model['content'],
                model['format'],
                path,
            )
        raise HTTPError(500, f'Unexpected type {model_type}')

    def restore_checkpoint(self, contents_mgr, checkpoint_id, path):
        """Restore a checkpoint."""
        model_type = contents_mgr.get(path, content=False)['type']
        if model_type == 'notebook':
            model = self.get_notebook_checkpoint(checkpoint_id, path)
        elif model_type == 'file':
            model = self.get_file_checkpoint(checkpoint_id, path)
        else:
            raise HTTPError(500, f'Unexpected type {model_type}')
        contents_mgr.save(model, path)

    # Required Methods
    def create_file_checkpoint(self, content, format, path):
        """Create a checkpoint of the current state of a file

        Returns a checkpoint model for the new checkpoint.
        """
        raise NotImplementedError("must be implemented in a subclass")

    def create_notebook_checkpoint(self, nb, path):
        """Create a checkpoint of the current state of a file

        Returns a checkpoint model for the new checkpoint.
        """
        raise NotImplementedError("must be implemented in a subclass")

    def get_file_checkpoint(self, checkpoint_id, path):
        """Get the content of a checkpoint for a non-notebook file.

        Returns a dict of the form:
            {
                'type': 'file',
                'content': <str>,
                'format': {'text','base64'},
            }
        """
        raise NotImplementedError("must be implemented in a subclass")

    def get_notebook_checkpoint(self, checkpoint_id, path):
        """Get the content of a checkpoint for a notebook.

        Returns a dict of the form:
            {
                'type': 'notebook',
                'content': <output of nbformat.read>,
            }
        """
        raise NotImplementedError("must be implemented in a subclass")
|
@ -0,0 +1,198 @@
|
||||
"""
|
||||
File-based Checkpoints implementations.
|
||||
"""
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from tornado.web import HTTPError
|
||||
|
||||
from .checkpoints import (
|
||||
Checkpoints,
|
||||
GenericCheckpointsMixin,
|
||||
)
|
||||
from .fileio import FileManagerMixin
|
||||
|
||||
from jupyter_core.utils import ensure_dir_exists
|
||||
from ipython_genutils.py3compat import getcwd
|
||||
from traitlets import Unicode
|
||||
|
||||
from notebook import _tz as tz
|
||||
|
||||
|
||||
class FileCheckpoints(FileManagerMixin, Checkpoints):
    """
    A Checkpoints that caches checkpoints for files in adjacent
    directories.

    Only works with FileContentsManager.  Use GenericFileCheckpoints if
    you want file-based checkpoints with another ContentsManager.
    """

    checkpoint_dir = Unicode(
        '.ipynb_checkpoints',
        config=True,
        help="""The directory name in which to keep file checkpoints

        This is a path relative to the file's own directory.

        By default, it is .ipynb_checkpoints
        """,
    )

    root_dir = Unicode(config=True)

    def _root_dir_default(self):
        # Inherit the parent ContentsManager's root when attached to one.
        try:
            return self.parent.root_dir
        except AttributeError:
            return getcwd()

    # ContentsManager-dependent checkpoint API
    def create_checkpoint(self, contents_mgr, path):
        """Create a checkpoint."""
        # Only one checkpoint is kept per file, so the id is fixed.
        checkpoint_id = 'checkpoint'
        source = contents_mgr._get_os_path(path)
        destination = self.checkpoint_path(checkpoint_id, path)
        self._copy(source, destination)
        return self.checkpoint_model(checkpoint_id, destination)

    def restore_checkpoint(self, contents_mgr, checkpoint_id, path):
        """Restore a checkpoint."""
        source = self.checkpoint_path(checkpoint_id, path)
        destination = contents_mgr._get_os_path(path)
        self._copy(source, destination)

    # ContentsManager-independent checkpoint API
    def rename_checkpoint(self, checkpoint_id, old_path, new_path):
        """Rename a checkpoint from old_path to new_path."""
        old_cp_path = self.checkpoint_path(checkpoint_id, old_path)
        new_cp_path = self.checkpoint_path(checkpoint_id, new_path)
        if os.path.isfile(old_cp_path):
            self.log.debug(
                "Renaming checkpoint %s -> %s",
                old_cp_path,
                new_cp_path,
            )
            with self.perm_to_403():
                shutil.move(old_cp_path, new_cp_path)

    def delete_checkpoint(self, checkpoint_id, path):
        """delete a file's checkpoint"""
        path = path.strip('/')
        cp_path = self.checkpoint_path(checkpoint_id, path)
        if not os.path.isfile(cp_path):
            self.no_such_checkpoint(path, checkpoint_id)

        self.log.debug("unlinking %s", cp_path)
        with self.perm_to_403():
            os.unlink(cp_path)

    def list_checkpoints(self, path):
        """list the checkpoints for a given file

        This contents manager currently only supports one checkpoint per file.
        """
        path = path.strip('/')
        checkpoint_id = "checkpoint"
        os_path = self.checkpoint_path(checkpoint_id, path)
        if not os.path.isfile(os_path):
            return []
        return [self.checkpoint_model(checkpoint_id, os_path)]

    # Checkpoint-related utilities
    def checkpoint_path(self, checkpoint_id, path):
        """find the path to a checkpoint"""
        path = path.strip('/')
        # Split API path into parent directory and filename.
        parent, name = ('/' + path).rsplit('/', 1)
        parent = parent.strip('/')
        basename, ext = os.path.splitext(name)
        filename = f"{basename}-{checkpoint_id}{ext}"
        os_path = self._get_os_path(path=parent)
        cp_dir = os.path.join(os_path, self.checkpoint_dir)
        with self.perm_to_403():
            ensure_dir_exists(cp_dir)
        return os.path.join(cp_dir, filename)

    def checkpoint_model(self, checkpoint_id, os_path):
        """construct the info dict for a given checkpoint"""
        stats = os.stat(os_path)
        return dict(
            id=checkpoint_id,
            last_modified=tz.utcfromtimestamp(stats.st_mtime),
        )

    # Error Handling
    def no_such_checkpoint(self, path, checkpoint_id):
        raise HTTPError(
            404,
            f'Checkpoint does not exist: {path}@{checkpoint_id}'
        )
|
||||
|
||||
|
||||
class GenericFileCheckpoints(GenericCheckpointsMixin, FileCheckpoints):
    """
    Local filesystem Checkpoints that works with any conforming
    ContentsManager.
    """

    def create_file_checkpoint(self, content, format, path):
        """Create a checkpoint from the current content of a file."""
        path = path.strip('/')
        # only the one checkpoint ID:
        checkpoint_id = "checkpoint"
        os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
        self.log.debug("creating checkpoint for %s", path)
        with self.perm_to_403():
            self._save_file(os_checkpoint_path, content, format=format)

        # return the checkpoint info
        return self.checkpoint_model(checkpoint_id, os_checkpoint_path)

    def create_notebook_checkpoint(self, nb, path):
        """Create a checkpoint from the current content of a notebook."""
        path = path.strip('/')
        # only the one checkpoint ID:
        checkpoint_id = "checkpoint"
        os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
        self.log.debug("creating checkpoint for %s", path)
        with self.perm_to_403():
            self._save_notebook(os_checkpoint_path, nb)

        # return the checkpoint info
        return self.checkpoint_model(checkpoint_id, os_checkpoint_path)

    def get_notebook_checkpoint(self, checkpoint_id, path):
        """Get a checkpoint for a notebook."""
        path = path.strip('/')
        self.log.info("restoring %s from checkpoint %s", path, checkpoint_id)
        os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)

        if not os.path.isfile(os_checkpoint_path):
            self.no_such_checkpoint(path, checkpoint_id)

        return {
            'type': 'notebook',
            'content': self._read_notebook(
                os_checkpoint_path,
                as_version=4,
            ),
        }

    def get_file_checkpoint(self, checkpoint_id, path):
        """Get a checkpoint for a file."""
        path = path.strip('/')
        self.log.info("restoring %s from checkpoint %s", path, checkpoint_id)
        os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)

        if not os.path.isfile(os_checkpoint_path):
            self.no_such_checkpoint(path, checkpoint_id)

        content, format = self._read_file(os_checkpoint_path, format=None)
        return {
            'type': 'file',
            'content': content,
            'format': format,
        }
|
340
.venv/Lib/site-packages/notebook/services/contents/fileio.py
Normal file
340
.venv/Lib/site-packages/notebook/services/contents/fileio.py
Normal file
@ -0,0 +1,340 @@
|
||||
"""
|
||||
Utilities for file-based Contents/Checkpoints managers.
|
||||
"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from contextlib import contextmanager
|
||||
import errno
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from tornado.web import HTTPError
|
||||
|
||||
from notebook.utils import (
|
||||
to_api_path,
|
||||
to_os_path,
|
||||
)
|
||||
import nbformat
|
||||
|
||||
from ipython_genutils.py3compat import str_to_unicode
|
||||
|
||||
from traitlets.config import Configurable
|
||||
from traitlets import Bool
|
||||
|
||||
from base64 import encodebytes, decodebytes
|
||||
|
||||
|
||||
def replace_file(src, dst):
    """Replace *dst* with *src*.

    Uses ``os.replace`` when available (Python 3); otherwise falls back to
    ``os.rename``, deleting *dst* first on Windows, where renaming over an
    existing file is not allowed.
    """
    if hasattr(os, 'replace'):  # PY3
        os.replace(src, dst)
        return
    if os.name == 'nt' and os.path.exists(dst):
        # Rename over existing file doesn't work on Windows
        os.remove(dst)
    os.rename(src, dst)
|
||||
|
||||
def copy2_safe(src, dst, log=None):
    """Copy *src* to *dst*, like ``shutil.copy2``.

    Unlike ``shutil.copy2``, a failure to copy file metadata (``copystat``)
    is logged rather than raised, since some filesystems don't support it.
    """
    shutil.copyfile(src, dst)
    try:
        shutil.copystat(src, dst)
    except OSError:
        if log:
            log.debug("copystat on %s failed", dst, exc_info=True)
|
||||
|
||||
def path_to_intermediate(path):
    '''Name of the intermediate file used in atomic writes.

    The .~ prefix will make Dropbox ignore the temporary file.'''
    head, tail = os.path.split(path)
    return os.path.join(head, '.~' + tail)
|
||||
|
||||
def path_to_invalid(path):
    '''Name of invalid file after a failed atomic write and subsequent read.'''
    head, tail = os.path.split(path)
    return os.path.join(head, tail + '.invalid')
|
||||
|
||||
@contextmanager
def atomic_writing(path, text=True, encoding='utf-8', log=None, **kwargs):
    """Context manager to write to a file only if the entire write is successful.

    This works by copying the previous file contents to a temporary file in the
    same directory, and renaming that file back to the target if the context
    exits with an error. If the context is successful, the new data is synced to
    disk and the temporary file is removed.

    Parameters
    ----------
    path : str
      The target file to write to.

    text : bool, optional
      Whether to open the file in text mode (i.e. to write unicode). Default is
      True.

    encoding : str, optional
      The encoding to use for files opened in text mode. Default is UTF-8.

    **kwargs
      Passed to :func:`io.open`.
    """
    # realpath doesn't work on Windows: https://bugs.python.org/issue9949
    # Luckily, we only need to resolve the file itself being a symlink, not
    # any of its directories, so this will suffice:
    if os.path.islink(path):
        path = os.path.join(os.path.dirname(path), os.readlink(path))

    tmp_path = path_to_intermediate(path)

    # Back up the current contents so a failed write can be rolled back.
    if os.path.isfile(path):
        copy2_safe(path, tmp_path, log=log)

    if text:
        # Make sure that text files have Unix linefeeds by default
        kwargs.setdefault('newline', '\n')
        fileobj = open(path, 'w', encoding=encoding, **kwargs)
    else:
        fileobj = open(path, 'wb', **kwargs)

    try:
        yield fileobj
    except BaseException:
        # Failed! Move the backup file back to the real path to avoid corruption.
        # BaseException (rather than a bare ``except:``) is equivalent but
        # explicit about also rolling back on KeyboardInterrupt/SystemExit.
        fileobj.close()
        # Only restore if a backup was actually taken: when the target did not
        # exist before, attempting the restore would raise FileNotFoundError
        # and mask the original write error.
        if os.path.isfile(tmp_path):
            replace_file(tmp_path, path)
        raise

    # Flush to disk
    fileobj.flush()
    os.fsync(fileobj.fileno())
    fileobj.close()

    # Written successfully, now remove the backup copy
    if os.path.isfile(tmp_path):
        os.remove(tmp_path)
|
||||
|
||||
|
||||
|
||||
@contextmanager
|
||||
def _simple_writing(path, text=True, encoding='utf-8', log=None, **kwargs):
|
||||
"""Context manager to write file without doing atomic writing
|
||||
( for weird filesystem eg: nfs).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
path : str
|
||||
The target file to write to.
|
||||
|
||||
text : bool, optional
|
||||
Whether to open the file in text mode (i.e. to write unicode). Default is
|
||||
True.
|
||||
|
||||
encoding : str, optional
|
||||
The encoding to use for files opened in text mode. Default is UTF-8.
|
||||
|
||||
**kwargs
|
||||
Passed to :func:`io.open`.
|
||||
"""
|
||||
# realpath doesn't work on Windows: https://bugs.python.org/issue9949
|
||||
# Luckily, we only need to resolve the file itself being a symlink, not
|
||||
# any of its directories, so this will suffice:
|
||||
if os.path.islink(path):
|
||||
path = os.path.join(os.path.dirname(path), os.readlink(path))
|
||||
|
||||
if text:
|
||||
# Make sure that text files have Unix linefeeds by default
|
||||
kwargs.setdefault('newline', '\n')
|
||||
fileobj = open(path, 'w', encoding=encoding, **kwargs)
|
||||
else:
|
||||
fileobj = open(path, 'wb', **kwargs)
|
||||
|
||||
try:
|
||||
yield fileobj
|
||||
except:
|
||||
fileobj.close()
|
||||
raise
|
||||
|
||||
fileobj.close()
|
||||
|
||||
|
||||
|
||||
|
||||
class FileManagerMixin(Configurable):
    """
    Mixin for ContentsAPI classes that interact with the filesystem.

    Provides facilities for reading, writing, and copying both notebooks and
    generic files.

    Shared by FileContentsManager and FileCheckpoints.

    Note
    ----
    Classes using this mixin must provide the following attributes:

    root_dir : unicode
        A directory against which API-style paths are to be resolved.

    log : logging.Logger
    """

    use_atomic_writing = Bool(True, config=True, help=
        """By default notebooks are saved on disk on a temporary file and then if successfully written, it replaces the old ones.
        This procedure, namely 'atomic_writing', causes some bugs on file system without operation order enforcement (like some networked fs).
        If set to False, the new notebook is written directly on the old one which could fail (eg: full filesystem or quota )""")

    @contextmanager
    def open(self, os_path, *args, **kwargs):
        """wrapper around io.open that turns permission errors into 403"""
        with self.perm_to_403(os_path):
            with open(os_path, *args, **kwargs) as f:
                yield f

    @contextmanager
    def atomic_writing(self, os_path, *args, **kwargs):
        """wrapper around atomic_writing that turns permission errors to 403.
        Depending on flag 'use_atomic_writing', the wrapper perform an actual atomic writing or
        simply writes the file (whatever an old exists or not)"""
        writer = atomic_writing if self.use_atomic_writing else _simple_writing
        with self.perm_to_403(os_path):
            with writer(os_path, *args, log=self.log, **kwargs) as f:
                yield f

    @contextmanager
    def perm_to_403(self, os_path=''):
        """context manager for turning permission errors into 403."""
        try:
            yield
        except OSError as e:
            if e.errno not in {errno.EPERM, errno.EACCES}:
                raise
            # make 403 error message without root prefix
            # this may not work perfectly on unicode paths on Python 2,
            # but nobody should be doing that anyway.
            if not os_path:
                os_path = str_to_unicode(e.filename or 'unknown file')
            path = to_api_path(os_path, root=self.root_dir)
            raise HTTPError(403, f'Permission denied: {path}') from e

    def _copy(self, src, dest):
        """copy src to dest

        like shutil.copy2, but log errors in copystat
        """
        copy2_safe(src, dest, log=self.log)

    def _get_os_path(self, path):
        """Given an API path, return its file system path.

        Parameters
        ----------
        path : string
            The relative API path to the named file.

        Returns
        -------
        path : string
            Native, absolute OS path to for a file.

        Raises
        ------
        404: if path is outside root
        """
        root = os.path.abspath(self.root_dir)
        os_path = to_os_path(path, root)
        # Reject anything that escapes the root directory.
        if not (os.path.abspath(os_path) + os.path.sep).startswith(root):
            raise HTTPError(404, f"{path} is outside root contents directory")
        return os_path

    def _read_notebook(self, os_path, as_version=4):
        """Read a notebook from an os path."""
        with self.open(os_path, 'r', encoding='utf-8') as f:
            try:
                return nbformat.read(f, as_version=as_version)
            except Exception as e:
                e_orig = e

        # If use_atomic_writing is enabled, we'll guess that it was also
        # enabled when this notebook was written and look for a valid
        # atomic intermediate.
        tmp_path = path_to_intermediate(os_path)

        if not self.use_atomic_writing or not os.path.exists(tmp_path):
            raise HTTPError(
                400,
                f"Unreadable Notebook: {os_path} {e_orig!r}",
            )

        # Move the bad file aside, restore the intermediate, and try again.
        invalid_file = path_to_invalid(os_path)
        replace_file(os_path, invalid_file)
        replace_file(tmp_path, os_path)
        return self._read_notebook(os_path, as_version)

    def _save_notebook(self, os_path, nb):
        """Save a notebook to an os_path."""
        with self.atomic_writing(os_path, encoding='utf-8') as f:
            nbformat.write(nb, f, version=nbformat.NO_CONVERT)

    def _read_file(self, os_path, format):
        """Read a non-notebook file.

        os_path: The path to be read.
        format:
          If 'text', the contents will be decoded as UTF-8.
          If 'base64', the raw bytes contents will be encoded as base64.
          If not specified, try to decode as UTF-8, and fall back to base64
        """
        if not os.path.isfile(os_path):
            raise HTTPError(400, f"Cannot read non-file {os_path}")

        with self.open(os_path, 'rb') as f:
            bcontent = f.read()

        if format not in (None, 'text'):
            # base64 was requested explicitly; no decoding attempt needed.
            return encodebytes(bcontent).decode('ascii'), 'base64'

        # Try to interpret as unicode if format is unknown or if unicode
        # was explicitly requested.
        try:
            return bcontent.decode('utf8'), 'text'
        except UnicodeError as e:
            if format == 'text':
                raise HTTPError(
                    400,
                    f"{os_path} is not UTF-8 encoded",
                    reason='bad format',
                ) from e
        return encodebytes(bcontent).decode('ascii'), 'base64'

    def _save_file(self, os_path, content, format):
        """Save content of a generic file."""
        if format not in {'text', 'base64'}:
            raise HTTPError(
                400,
                "Must specify format of file contents as 'text' or 'base64'",
            )
        try:
            if format == 'text':
                bcontent = content.encode('utf8')
            else:
                bcontent = decodebytes(content.encode('ascii'))
        except Exception as e:
            raise HTTPError(
                400, f'Encoding error saving {os_path}: {e}'
            ) from e

        with self.atomic_writing(os_path, text=False) as f:
            f.write(bcontent)
|
@ -0,0 +1,606 @@
|
||||
"""A contents manager that uses the local file system for storage."""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from datetime import datetime
|
||||
import errno
|
||||
import os
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import warnings
|
||||
import mimetypes
|
||||
import nbformat
|
||||
|
||||
from send2trash import send2trash
|
||||
from send2trash.exceptions import TrashPermissionError
|
||||
from tornado import web
|
||||
|
||||
from .filecheckpoints import FileCheckpoints
|
||||
from .fileio import FileManagerMixin
|
||||
from .manager import ContentsManager
|
||||
from ...utils import exists
|
||||
|
||||
from ipython_genutils.importstring import import_item
|
||||
from traitlets import Any, Unicode, Bool, TraitError, observe, default, validate
|
||||
from ipython_genutils.py3compat import getcwd, string_types
|
||||
|
||||
from notebook import _tz as tz
|
||||
from notebook.utils import (
|
||||
is_hidden, is_file_hidden,
|
||||
to_api_path,
|
||||
)
|
||||
from notebook.base.handlers import AuthenticatedFileHandler
|
||||
from notebook.transutils import _
|
||||
|
||||
from os.path import samefile
|
||||
|
||||
_script_exporter = None
|
||||
|
||||
|
||||
def _post_save_script(model, os_path, contents_manager, **kwargs):
    """convert notebooks to Python script after save with nbconvert

    replaces `jupyter notebook --script`
    """
    from nbconvert.exporters.script import ScriptExporter
    warnings.warn("`_post_save_script` is deprecated and will be removed in Notebook 5.0", DeprecationWarning)

    if model['type'] != 'notebook':
        return

    # Build the exporter lazily, once, and reuse it across saves.
    global _script_exporter
    if _script_exporter is None:
        _script_exporter = ScriptExporter(parent=contents_manager)
    log = contents_manager.log

    base, ext = os.path.splitext(os_path)
    script, resources = _script_exporter.from_filename(os_path)
    script_fname = base + resources.get('output_extension', '.txt')
    log.info("Saving script /%s", to_api_path(script_fname, contents_manager.root_dir))
    with open(script_fname, 'w', encoding='utf-8') as f:
        f.write(script)
|
||||
|
||||
|
||||
class FileContentsManager(FileManagerMixin, ContentsManager):

    root_dir = Unicode(config=True)

    @default('root_dir')
    def _default_root_dir(self):
        # Default to the application's notebook_dir, or the CWD when the
        # manager has no parent application.
        try:
            return self.parent.notebook_dir
        except AttributeError:
            return getcwd()
|
||||
|
||||
save_script = Bool(False, config=True, help='DEPRECATED, use post_save_hook. Will be removed in Notebook 5.0')
|
||||
@observe('save_script')
|
||||
def _update_save_script(self, change):
|
||||
if not change['new']:
|
||||
return
|
||||
self.log.warning("""
|
||||
`--script` is deprecated and will be removed in notebook 5.0.
|
||||
|
||||
You can trigger nbconvert via pre- or post-save hooks:
|
||||
|
||||
ContentsManager.pre_save_hook
|
||||
FileContentsManager.post_save_hook
|
||||
|
||||
A post-save hook has been registered that calls:
|
||||
|
||||
jupyter nbconvert --to script [notebook]
|
||||
|
||||
which behaves similarly to `--script`.
|
||||
""")
|
||||
|
||||
self.post_save_hook = _post_save_script
|
||||
|
||||
post_save_hook = Any(None, config=True, allow_none=True,
|
||||
help="""Python callable or importstring thereof
|
||||
|
||||
to be called on the path of a file just saved.
|
||||
|
||||
This can be used to process the file on disk,
|
||||
such as converting the notebook to a script or HTML via nbconvert.
|
||||
|
||||
It will be called as (all arguments passed by keyword)::
|
||||
|
||||
hook(os_path=os_path, model=model, contents_manager=instance)
|
||||
|
||||
- path: the filesystem path to the file just written
|
||||
- model: the model representing the file
|
||||
- contents_manager: this ContentsManager instance
|
||||
"""
|
||||
)
|
||||
|
||||
@validate('post_save_hook')
|
||||
def _validate_post_save_hook(self, proposal):
|
||||
value = proposal['value']
|
||||
if isinstance(value, string_types):
|
||||
value = import_item(value)
|
||||
if not callable(value):
|
||||
raise TraitError("post_save_hook must be callable")
|
||||
return value
|
||||
|
||||
def run_post_save_hook(self, model, os_path):
|
||||
"""Run the post-save hook if defined, and log errors"""
|
||||
if self.post_save_hook:
|
||||
try:
|
||||
self.log.debug("Running post-save hook on %s", os_path)
|
||||
self.post_save_hook(os_path=os_path, model=model, contents_manager=self)
|
||||
except Exception as e:
|
||||
self.log.error("Post-save hook failed o-n %s", os_path, exc_info=True)
|
||||
raise web.HTTPError(500, f'Unexpected error while running post hook save: {e}') from e
|
||||
|
||||
@validate('root_dir')
|
||||
def _validate_root_dir(self, proposal):
|
||||
"""Do a bit of validation of the root_dir."""
|
||||
value = proposal['value']
|
||||
if not os.path.isabs(value):
|
||||
# If we receive a non-absolute path, make it absolute.
|
||||
value = os.path.abspath(value)
|
||||
if not os.path.isdir(value):
|
||||
raise TraitError(f"{value!r} is not a directory")
|
||||
return value
|
||||
|
||||
@default('checkpoints_class')
|
||||
def _checkpoints_class_default(self):
|
||||
return FileCheckpoints
|
||||
|
||||
delete_to_trash = Bool(True, config=True,
|
||||
help="""If True (default), deleting files will send them to the
|
||||
platform's trash/recycle bin, where they can be recovered. If False,
|
||||
deleting files really deletes them.""")
|
||||
|
||||
@default('files_handler_class')
|
||||
def _files_handler_class_default(self):
|
||||
return AuthenticatedFileHandler
|
||||
|
||||
@default('files_handler_params')
|
||||
def _files_handler_params_default(self):
|
||||
return {'path': self.root_dir}
|
||||
|
||||
def is_hidden(self, path):
|
||||
"""Does the API style path correspond to a hidden directory or file?
|
||||
|
||||
Parameters
|
||||
----------
|
||||
path : string
|
||||
The path to check. This is an API path (`/` separated,
|
||||
relative to root_dir).
|
||||
|
||||
Returns
|
||||
-------
|
||||
hidden : bool
|
||||
Whether the path exists and is hidden.
|
||||
"""
|
||||
path = path.strip('/')
|
||||
os_path = self._get_os_path(path=path)
|
||||
return is_hidden(os_path, self.root_dir)
|
||||
|
||||
def file_exists(self, path):
|
||||
"""Returns True if the file exists, else returns False.
|
||||
|
||||
API-style wrapper for os.path.isfile
|
||||
|
||||
Parameters
|
||||
----------
|
||||
path : string
|
||||
The relative path to the file (with '/' as separator)
|
||||
|
||||
Returns
|
||||
-------
|
||||
exists : bool
|
||||
Whether the file exists.
|
||||
"""
|
||||
path = path.strip('/')
|
||||
os_path = self._get_os_path(path)
|
||||
return os.path.isfile(os_path)
|
||||
|
||||
def dir_exists(self, path):
|
||||
"""Does the API-style path refer to an extant directory?
|
||||
|
||||
API-style wrapper for os.path.isdir
|
||||
|
||||
Parameters
|
||||
----------
|
||||
path : string
|
||||
The path to check. This is an API path (`/` separated,
|
||||
relative to root_dir).
|
||||
|
||||
Returns
|
||||
-------
|
||||
exists : bool
|
||||
Whether the path is indeed a directory.
|
||||
"""
|
||||
path = path.strip('/')
|
||||
os_path = self._get_os_path(path=path)
|
||||
return os.path.isdir(os_path)
|
||||
|
||||
def exists(self, path):
|
||||
"""Returns True if the path exists, else returns False.
|
||||
|
||||
API-style wrapper for os.path.exists
|
||||
|
||||
Parameters
|
||||
----------
|
||||
path : string
|
||||
The API path to the file (with '/' as separator)
|
||||
|
||||
Returns
|
||||
-------
|
||||
exists : bool
|
||||
Whether the target exists.
|
||||
"""
|
||||
path = path.strip('/')
|
||||
os_path = self._get_os_path(path=path)
|
||||
return exists(os_path)
|
||||
|
||||
def _base_model(self, path):
    """Build the common base of a contents model"""
    os_path = self._get_os_path(path)
    info = os.lstat(os_path)

    try:
        size = info.st_size
    except (ValueError, OSError):
        self.log.warning('Unable to get size.')
        size = None

    try:
        last_modified = tz.utcfromtimestamp(info.st_mtime)
    except (ValueError, OSError):
        # Files can rarely have an invalid timestamp
        # https://github.com/jupyter/notebook/issues/2539
        # https://github.com/jupyter/notebook/issues/2757
        # Use the Unix epoch as a fallback so we don't crash.
        self.log.warning('Invalid mtime %s for %s', info.st_mtime, os_path)
        last_modified = datetime(1970, 1, 1, 0, 0, tzinfo=tz.UTC)

    try:
        created = tz.utcfromtimestamp(info.st_ctime)
    except (ValueError, OSError):  # See above
        self.log.warning('Invalid ctime %s for %s', info.st_ctime, os_path)
        created = datetime(1970, 1, 1, 0, 0, tzinfo=tz.UTC)

    # Create the base model.
    model = {
        'name': path.rsplit('/', 1)[-1],
        'path': path,
        'last_modified': last_modified,
        'created': created,
        'content': None,
        'format': None,
        'mimetype': None,
        'size': size,
    }

    try:
        model['writable'] = os.access(os_path, os.W_OK)
    except OSError:
        self.log.error("Failed to check write permissions on %s", os_path)
        model['writable'] = False
    return model
|
||||
|
||||
def _dir_model(self, path, content=True):
    """Build a model for a directory

    if content is requested, will include a listing of the directory
    """
    os_path = self._get_os_path(path)
    four_o_four = f'directory does not exist: {path!r}'

    if not os.path.isdir(os_path):
        raise web.HTTPError(404, four_o_four)
    if is_hidden(os_path, self.root_dir) and not self.allow_hidden:
        # Hide the existence of hidden directories via a 404.
        self.log.info("Refusing to serve hidden directory %r, via 404 Error",
                      os_path)
        raise web.HTTPError(404, four_o_four)

    model = self._base_model(path)
    model['type'] = 'directory'
    model['size'] = None
    if content:
        contents = model['content'] = []
        os_dir = self._get_os_path(path)
        for name in os.listdir(os_dir):
            try:
                entry_path = os.path.join(os_dir, name)
            except UnicodeDecodeError as e:
                self.log.warning(
                    "failed to decode filename '%s': %s", name, e)
                continue

            try:
                st = os.lstat(entry_path)
            except OSError as e:
                # skip over broken symlinks in listing
                if e.errno == errno.ENOENT:
                    self.log.warning("%s doesn't exist", entry_path)
                else:
                    self.log.warning("Error stat-ing %s: %s", entry_path, e)
                continue

            # Only list symlinks, regular files, and directories.
            if not (stat.S_ISLNK(st.st_mode)
                    or stat.S_ISREG(st.st_mode)
                    or stat.S_ISDIR(st.st_mode)):
                self.log.debug("%s not a regular file", entry_path)
                continue

            try:
                if self.should_list(name) and (
                        self.allow_hidden
                        or not is_file_hidden(entry_path, stat_res=st)):
                    contents.append(
                        self.get(path=f'{path}/{name}', content=False)
                    )
            except OSError as e:
                # ELOOP: recursive symlink
                if e.errno != errno.ELOOP:
                    self.log.warning(
                        "Unknown error checking if file %r is hidden",
                        entry_path,
                        exc_info=True,
                    )
        model['format'] = 'json'

    return model
|
||||
|
||||
|
||||
def _file_model(self, path, content=True, format=None):
    """Build a model for a file

    if content is requested, include the file contents.

    format:
      If 'text', the contents will be decoded as UTF-8.
      If 'base64', the raw bytes contents will be encoded as base64.
      If not specified, try to decode as UTF-8, and fall back to base64
    """
    model = self._base_model(path)
    model['type'] = 'file'

    os_path = self._get_os_path(path)
    model['mimetype'] = mimetypes.guess_type(os_path)[0]

    if content:
        content, format = self._read_file(os_path, format)
        if model['mimetype'] is None:
            # Pick a mimetype from the format actually used to read the file.
            default_mimes = {
                'text': 'text/plain',
                'base64': 'application/octet-stream',
            }
            model['mimetype'] = default_mimes[format]
        model.update(content=content, format=format)

    return model
|
||||
|
||||
def _notebook_model(self, path, content=True):
    """Build a notebook model

    if content is requested, the notebook content will be populated
    as a JSON structure (not double-serialized)
    """
    model = self._base_model(path)
    model['type'] = 'notebook'
    os_path = self._get_os_path(path)

    if not content:
        return model

    nb = self._read_notebook(os_path, as_version=4)
    self.mark_trusted_cells(nb, path)
    model['content'] = nb
    model['format'] = 'json'
    self.validate_notebook_model(model)

    return model
|
||||
|
||||
def get(self, path, content=True, type=None, format=None):
    """ Takes a path for an entity and returns its model

    Parameters
    ----------
    path : str
        the API path that describes the relative path for the target
    content : bool
        Whether to include the contents in the reply
    type : str, optional
        The requested type - 'file', 'notebook', or 'directory'.
        Will raise HTTPError 400 if the content doesn't match.
    format : str, optional
        The requested format for file contents. 'text' or 'base64'.
        Ignored if this returns a notebook or directory model.

    Returns
    -------
    model : dict
        the contents model. If content=True, returns the contents
        of the file or directory as well.
    """
    path = path.strip('/')

    if not self.exists(path):
        raise web.HTTPError(404, f'No such file or directory: {path}')

    os_path = self._get_os_path(path)

    # Dispatch on what is actually on disk, cross-checked against the
    # caller's requested type.
    if os.path.isdir(os_path):
        if type not in (None, 'directory'):
            raise web.HTTPError(400,
                f'{path} is a directory, not a {type}', reason='bad type')
        return self._dir_model(path, content=content)

    if type == 'notebook' or (type is None and path.endswith('.ipynb')):
        return self._notebook_model(path, content=content)

    if type == 'directory':
        raise web.HTTPError(
            400,
            f'{path} is not a directory', reason='bad type')
    return self._file_model(path, content=content, format=format)
|
||||
|
||||
def _save_directory(self, os_path, model, path=''):
    """create a directory"""
    if is_hidden(os_path, self.root_dir) and not self.allow_hidden:
        raise web.HTTPError(400, f'Cannot create hidden directory {os_path!r}')
    if os.path.exists(os_path):
        # Already present: only ok if it really is a directory.
        if not os.path.isdir(os_path):
            raise web.HTTPError(400, f'Not a directory: {os_path}')
        self.log.debug("Directory %r already exists", os_path)
        return
    with self.perm_to_403():
        os.mkdir(os_path)
|
||||
|
||||
def save(self, model, path=''):
    """Save the file model and return the model with no content."""
    path = path.strip('/')

    if 'type' not in model:
        raise web.HTTPError(400, 'No file type provided')
    if 'content' not in model and model['type'] != 'directory':
        raise web.HTTPError(400, 'No file content provided')

    os_path = self._get_os_path(path)
    self.log.debug("Saving %s", os_path)

    self.run_pre_save_hook(model=model, path=path)

    model_type = model['type']
    try:
        if model_type == 'notebook':
            nb = nbformat.from_dict(model['content'])
            self.check_and_sign(nb, path)
            self._save_notebook(os_path, nb)
            # One checkpoint should always exist for notebooks.
            if not self.checkpoints.list_checkpoints(path):
                self.create_checkpoint(path)
        elif model_type == 'file':
            # Missing format will be handled internally by _save_file.
            self._save_file(os_path, model['content'], model.get('format'))
        elif model_type == 'directory':
            self._save_directory(os_path, model, path)
        else:
            raise web.HTTPError(400, f"Unhandled contents type: {model['type']}")
    except web.HTTPError:
        raise
    except Exception as e:
        self.log.error('Error while saving file: %s %s', path, e, exc_info=True)
        raise web.HTTPError(500, f'Unexpected error while saving file: {path} {e}') from e

    validation_message = None
    if model_type == 'notebook':
        self.validate_notebook_model(model)
        validation_message = model.get('message', None)

    # Re-fetch a content-free model and carry any validation message along.
    model = self.get(path, content=False)
    if validation_message:
        model['message'] = validation_message

    self.run_post_save_hook(model=model, os_path=os_path)

    return model
|
||||
|
||||
def delete_file(self, path):
    """Delete file at path."""
    path = path.strip('/')
    os_path = self._get_os_path(path)
    if not os.path.exists(os_path):
        raise web.HTTPError(404, f'File or directory does not exist: {os_path}')

    def is_non_empty_dir(os_path):
        # A directory containing only leftover checkpoints is
        # considered empty.
        if not os.path.isdir(os_path):
            return False
        cp_dir = getattr(self.checkpoints, 'checkpoint_dir', None)
        return bool(set(os.listdir(os_path)) - {cp_dir})

    if self.delete_to_trash:
        if sys.platform == 'win32' and is_non_empty_dir(os_path):
            # send2trash can really delete files on Windows, so disallow
            # deleting non-empty files. See Github issue 3631.
            raise web.HTTPError(400, f'Directory {os_path} not empty')
        try:
            self.log.debug("Sending %s to trash", os_path)
            send2trash(os_path)
            return
        except TrashPermissionError as e:
            # Fall through to a permanent delete below.
            self.log.warning("Skipping trash for %s, %s", os_path, e)

    if os.path.isdir(os_path):
        # Don't permanently delete non-empty directories.
        if is_non_empty_dir(os_path):
            raise web.HTTPError(400, f'Directory {os_path} not empty')
        self.log.debug("Removing directory %s", os_path)
        with self.perm_to_403():
            shutil.rmtree(os_path)
    else:
        self.log.debug("Unlinking file %s", os_path)
        with self.perm_to_403():
            os.unlink(os_path)
|
||||
|
||||
def rename_file(self, old_path, new_path):
    """Rename a file."""
    old_path = old_path.strip('/')
    new_path = new_path.strip('/')
    if new_path == old_path:
        return

    # Perform path validation prior to converting to os-specific value since this
    # is still relative to root_dir.
    self._validate_path(new_path)

    old_os_path = self._get_os_path(old_path)
    new_os_path = self._get_os_path(new_path)

    # Should we proceed with the move?
    if os.path.exists(new_os_path) and not samefile(old_os_path, new_os_path):
        raise web.HTTPError(409, f'File already exists: {new_path}')

    # Move the file
    try:
        with self.perm_to_403():
            shutil.move(old_os_path, new_os_path)
    except web.HTTPError:
        raise
    except Exception as e:
        raise web.HTTPError(500, f'Unknown error renaming file: {old_path} {e}') from e
|
||||
|
||||
def info_string(self):
    """One-line, translated description of where files are served from."""
    message = _("Serving notebooks from local directory: %s")
    return message % self.root_dir
|
||||
|
||||
def get_kernel_path(self, path, model=None):
    """Return the initial API path of a kernel associated with a given notebook"""
    if self.dir_exists(path):
        # The path is itself a directory: start the kernel there.
        return path
    # Otherwise use the notebook's parent directory ('' when at the root).
    parent_dir, _sep, _name = path.rpartition('/')
    return parent_dir
|
||||
|
||||
@staticmethod
def _validate_path(path):
    """Checks if the path contains invalid characters relative to the current platform"""
    if sys.platform == 'win32':
        # On Windows systems, we MUST disallow colons otherwise an Alternative Data Stream will
        # be created and confusion will reign! (See https://github.com/jupyter/notebook/issues/5190)
        # Go ahead and add other invalid (and non-path-separator) characters here as well so there's
        # consistent behavior - although all others will result in '[Errno 22]Invalid Argument' errors.
        invalid_chars = '?:><*"|'
    else:
        # On non-windows systems, allow the underlying file creation to perform enforcement when appropriate
        invalid_chars = ''

    if any(char in path for char in invalid_chars):
        raise web.HTTPError(400, f"Path '{path}' contains characters that are invalid for the filesystem. "
                            f"Path names on this filesystem cannot contain any of the following "
                            f"characters: {invalid_chars}")
|
329
.venv/Lib/site-packages/notebook/services/contents/handlers.py
Normal file
329
.venv/Lib/site-packages/notebook/services/contents/handlers.py
Normal file
@ -0,0 +1,329 @@
|
||||
"""Tornado handlers for the contents web service.
|
||||
|
||||
Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-27%3A-Contents-Service
|
||||
"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import json
|
||||
|
||||
from tornado import gen, web
|
||||
|
||||
from notebook.utils import maybe_future, url_path_join, url_escape
|
||||
try:
|
||||
from jupyter_client.jsonutil import json_default
|
||||
except ImportError:
|
||||
from jupyter_client.jsonutil import (
|
||||
date_default as json_default
|
||||
)
|
||||
|
||||
from notebook.base.handlers import (
|
||||
IPythonHandler, APIHandler, path_regex,
|
||||
)
|
||||
|
||||
|
||||
def validate_model(model, expect_content):
    """
    Validate a model returned by a ContentsManager method.

    If expect_content is True, then we expect non-null entries for 'content'
    and 'format'.
    """
    required_keys = {
        "name", "path", "type", "writable", "created",
        "last_modified", "mimetype", "content", "format",
    }
    missing = required_keys - set(model.keys())
    if missing:
        raise web.HTTPError(
            500,
            f"Missing Model Keys: {missing}",
        )

    maybe_none_keys = ['content', 'format']
    if expect_content:
        # Both keys must be populated when content was requested.
        errors = [key for key in maybe_none_keys if model[key] is None]
        if errors:
            raise web.HTTPError(
                500,
                f"Keys unexpectedly None: {errors}",
            )
    else:
        # Neither key may be populated when content was not requested.
        errors = {
            key: model[key]
            for key in maybe_none_keys
            if model[key] is not None
        }
        if errors:
            raise web.HTTPError(
                500,
                f"Keys unexpectedly not None: {errors}",
            )
|
||||
|
||||
|
||||
class ContentsHandler(APIHandler):
    """Main API handler for /api/contents: GET, POST, PUT, PATCH, DELETE."""

    def location_url(self, path):
        """Return the full URL location of a file.

        Parameters
        ----------
        path : unicode
            The API path of the file, such as "foo/bar.txt".
        """
        return url_path_join(
            self.base_url, 'api', 'contents', url_escape(path)
        )

    def _finish_model(self, model, location=True):
        """Finish a JSON request with a model, setting relevant headers, etc."""
        if location:
            self.set_header('Location', self.location_url(model['path']))
        self.set_header('Last-Modified', model['last_modified'])
        self.set_header('Content-Type', 'application/json')
        self.finish(json.dumps(model, default=json_default))

    @web.authenticated
    @gen.coroutine
    def get(self, path=''):
        """Return a model for a file or directory.

        A directory model contains a list of models (without content)
        of the files and directories it contains.
        """
        path = path or ''

        # Validate all query arguments up front.
        file_type = self.get_query_argument('type', default=None)
        if file_type not in {None, 'directory', 'file', 'notebook'}:
            raise web.HTTPError(400, f'Type {file_type!r} is invalid')

        file_format = self.get_query_argument('format', default=None)
        if file_format not in {None, 'text', 'base64'}:
            raise web.HTTPError(400, f'Format {file_format!r} is invalid')

        content_arg = self.get_query_argument('content', default='1')
        if content_arg not in {'0', '1'}:
            raise web.HTTPError(400, f'Content {content_arg!r} is invalid')
        content = int(content_arg)

        model = yield maybe_future(self.contents_manager.get(
            path=path, type=file_type, format=file_format, content=content,
        ))
        validate_model(model, expect_content=content)
        self._finish_model(model, location=False)

    @web.authenticated
    @gen.coroutine
    def patch(self, path=''):
        """PATCH renames a file or directory without re-uploading content."""
        model = self.get_json_body()
        if model is None:
            raise web.HTTPError(400, 'JSON body missing')
        model = yield maybe_future(self.contents_manager.update(model, path))
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @gen.coroutine
    def _copy(self, copy_from, copy_to=None):
        """Copy a file, optionally specifying a target directory."""
        self.log.info("Copying %s to %s", copy_from, copy_to or '')
        model = yield maybe_future(self.contents_manager.copy(copy_from, copy_to))
        self.set_status(201)
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @gen.coroutine
    def _upload(self, model, path):
        """Handle upload of a new file to path"""
        self.log.info("Uploading file to %s", path)
        model = yield maybe_future(self.contents_manager.new(model, path))
        self.set_status(201)
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @gen.coroutine
    def _new_untitled(self, path, type='', ext=''):
        """Create a new, empty untitled entity"""
        self.log.info("Creating new %s in %s", type or 'file', path)
        model = yield maybe_future(
            self.contents_manager.new_untitled(path=path, type=type, ext=ext)
        )
        self.set_status(201)
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @gen.coroutine
    def _save(self, model, path):
        """Save an existing file."""
        chunk = model.get("chunk", None)
        if not chunk or chunk == -1:  # Avoid tedious log information
            self.log.info("Saving file at %s", path)
        model = yield maybe_future(self.contents_manager.save(model, path))
        validate_model(model, expect_content=False)
        self._finish_model(model)

    @web.authenticated
    @gen.coroutine
    def post(self, path=''):
        """Create a new file in the specified path.

        POST creates new files. The server always decides on the name.

        POST /api/contents/path
          New untitled, empty file or directory.
        POST /api/contents/path
          with body {"copy_from" : "/path/to/OtherNotebook.ipynb"}
          New copy of OtherNotebook in path
        """
        cm = self.contents_manager

        file_exists = yield maybe_future(cm.file_exists(path))
        if file_exists:
            raise web.HTTPError(400, "Cannot POST to files, use PUT instead.")

        dir_exists = yield maybe_future(cm.dir_exists(path))
        if not dir_exists:
            raise web.HTTPError(404, f"No such directory: {path}")

        model = self.get_json_body()
        if model is None:
            yield self._new_untitled(path)
            return

        copy_from = model.get('copy_from')
        if copy_from:
            yield self._copy(copy_from, path)
        else:
            yield self._new_untitled(
                path, type=model.get('type', ''), ext=model.get('ext', ''))

    @web.authenticated
    @gen.coroutine
    def put(self, path=''):
        """Saves the file in the location specified by name and path.

        PUT is very similar to POST, but the requester specifies the name,
        whereas with POST, the server picks the name.

        PUT /api/contents/path/Name.ipynb
          Save notebook at ``path/Name.ipynb``. Notebook structure is specified
          in `content` key of JSON request body. If content is not specified,
          create a new empty notebook.
        """
        model = self.get_json_body()
        if not model:
            yield maybe_future(self._new_untitled(path))
            return

        if model.get('copy_from'):
            raise web.HTTPError(400, "Cannot copy with PUT, only POST")
        exists = yield maybe_future(self.contents_manager.file_exists(path))
        if exists:
            yield maybe_future(self._save(model, path))
        else:
            yield maybe_future(self._upload(model, path))

    @web.authenticated
    @gen.coroutine
    def delete(self, path=''):
        """delete a file in the given path"""
        self.log.warning('delete %s', path)
        yield maybe_future(self.contents_manager.delete(path))
        self.set_status(204)
        self.finish()
|
||||
|
||||
|
||||
class CheckpointsHandler(APIHandler):
    """List (GET) and create (POST) checkpoints for a file."""

    @web.authenticated
    @gen.coroutine
    def get(self, path=''):
        """get lists checkpoints for a file"""
        checkpoints = yield maybe_future(
            self.contents_manager.list_checkpoints(path))
        self.finish(json.dumps(checkpoints, default=json_default))

    @web.authenticated
    @gen.coroutine
    def post(self, path=''):
        """post creates a new checkpoint"""
        checkpoint = yield maybe_future(
            self.contents_manager.create_checkpoint(path))
        location = url_path_join(
            self.base_url, 'api/contents', url_escape(path),
            'checkpoints', url_escape(checkpoint['id']))
        self.set_header('Location', location)
        self.set_status(201)
        self.finish(json.dumps(checkpoint, default=json_default))
|
||||
|
||||
|
||||
class ModifyCheckpointsHandler(APIHandler):
    """Restore (POST) or delete (DELETE) a single named checkpoint."""

    @web.authenticated
    @gen.coroutine
    def post(self, path, checkpoint_id):
        """post restores a file from a checkpoint"""
        yield maybe_future(
            self.contents_manager.restore_checkpoint(checkpoint_id, path))
        self.set_status(204)
        self.finish()

    @web.authenticated
    @gen.coroutine
    def delete(self, path, checkpoint_id):
        """delete clears a checkpoint for a given file"""
        yield maybe_future(
            self.contents_manager.delete_checkpoint(checkpoint_id, path))
        self.set_status(204)
        self.finish()
|
||||
|
||||
|
||||
class NotebooksRedirectHandler(IPythonHandler):
    """Redirect /api/notebooks to /api/contents"""
    SUPPORTED_METHODS = ('GET', 'PUT', 'PATCH', 'POST', 'DELETE')

    def get(self, path):
        self.log.warning("/api/notebooks is deprecated, use /api/contents")
        target = url_path_join(self.base_url, 'api/contents', path)
        self.redirect(target)

    # All verbs redirect identically.
    put = patch = post = delete = get
|
||||
|
||||
|
||||
class TrustNotebooksHandler(IPythonHandler):
    """ Handles trust/signing of notebooks """

    @web.authenticated
    @gen.coroutine
    def post(self, path=''):
        yield maybe_future(self.contents_manager.trust_notebook(path))
        self.set_status(201)
        self.finish()
|
||||
#-----------------------------------------------------------------------------
# URL to handler mappings
#-----------------------------------------------------------------------------


_checkpoint_id_regex = r"(?P<checkpoint_id>[\w-]+)"

# Specific sub-resources (checkpoints, trust) are registered before the
# generic contents route so they match first.
default_handlers = [
    (fr"/api/contents{path_regex}/checkpoints", CheckpointsHandler),
    (fr"/api/contents{path_regex}/checkpoints/{_checkpoint_id_regex}",
        ModifyCheckpointsHandler),
    (fr"/api/contents{path_regex}/trust", TrustNotebooksHandler),
    (fr"/api/contents{path_regex}", ContentsHandler),
    (r"/api/notebooks/?(.*)", NotebooksRedirectHandler),
]
|
@ -0,0 +1,68 @@
|
||||
from notebook.services.contents.filemanager import FileContentsManager
|
||||
from tornado import web
|
||||
import base64
|
||||
import os
|
||||
|
||||
class LargeFileManager(FileContentsManager):
    """Handle large file upload."""

    def save(self, model, path=''):
        """Save the file model and return the model with no content."""
        chunk = model.get('chunk', None)
        if chunk is None:
            # Not a chunked upload: defer to the regular save path.
            return super().save(model, path)

        path = path.strip('/')

        if 'type' not in model:
            raise web.HTTPError(400, 'No file type provided')
        if model['type'] != 'file':
            raise web.HTTPError(400, f'File type "{model["type"]}" is not supported for large file transfer')
        if 'content' not in model and model['type'] != 'directory':
            raise web.HTTPError(400, 'No file content provided')

        os_path = self._get_os_path(path)

        try:
            if chunk == 1:
                # First chunk: truncate/create the file and run pre-save hooks.
                self.log.debug("Saving %s", os_path)
                self.run_pre_save_hook(model=model, path=path)
                super()._save_file(os_path, model['content'], model.get('format'))
            else:
                # Subsequent chunks are appended.
                self._save_large_file(os_path, model['content'], model.get('format'))
        except web.HTTPError:
            raise
        except Exception as e:
            self.log.error('Error while saving file: %s %s', path, e, exc_info=True)
            raise web.HTTPError(500, f'Unexpected error while saving file: {path} {e}') from e

        model = self.get(path, content=False)

        # Last chunk
        if chunk == -1:
            self.run_post_save_hook(model=model, os_path=os_path)
        return model

    def _save_large_file(self, os_path, content, format):
        """Save content of a generic file."""
        if format not in {'text', 'base64'}:
            raise web.HTTPError(
                400,
                "Must specify format of file contents as 'text' or 'base64'",
            )
        try:
            if format == 'text':
                bcontent = content.encode('utf8')
            else:
                bcontent = base64.b64decode(content.encode('ascii'))
        except Exception as e:
            raise web.HTTPError(
                400, f'Encoding error saving {os_path}: {e}'
            ) from e

        with self.perm_to_403(os_path):
            if os.path.islink(os_path):
                # Follow a symlink so the append lands in the real file.
                os_path = os.path.join(os.path.dirname(os_path), os.readlink(os_path))
            with open(os_path, 'ab') as f:
                f.write(bcontent)
|
530
.venv/Lib/site-packages/notebook/services/contents/manager.py
Normal file
530
.venv/Lib/site-packages/notebook/services/contents/manager.py
Normal file
@ -0,0 +1,530 @@
|
||||
"""A base class for contents managers."""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from fnmatch import fnmatch
|
||||
import itertools
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
|
||||
from tornado.web import HTTPError, RequestHandler
|
||||
|
||||
from ...files.handlers import FilesHandler
|
||||
from .checkpoints import Checkpoints
|
||||
from traitlets.config.configurable import LoggingConfigurable
|
||||
from nbformat import sign, validate as validate_nb, ValidationError
|
||||
from nbformat.v4 import new_notebook
|
||||
from ipython_genutils.importstring import import_item
|
||||
from traitlets import (
|
||||
Any,
|
||||
Bool,
|
||||
Dict,
|
||||
Instance,
|
||||
List,
|
||||
TraitError,
|
||||
Type,
|
||||
Unicode,
|
||||
validate,
|
||||
default,
|
||||
)
|
||||
from ipython_genutils.py3compat import string_types
|
||||
from notebook.base.handlers import IPythonHandler
|
||||
from notebook.transutils import _
|
||||
|
||||
|
||||
copy_pat = re.compile(r'\-Copy\d*\.')
|
||||
|
||||
|
||||
class ContentsManager(LoggingConfigurable):
|
||||
"""Base class for serving files and directories.
|
||||
|
||||
This serves any text or binary file,
|
||||
as well as directories,
|
||||
with special handling for JSON notebook documents.
|
||||
|
||||
Most APIs take a path argument,
|
||||
which is always an API-style unicode path,
|
||||
and always refers to a directory.
|
||||
|
||||
- unicode, not url-escaped
|
||||
- '/'-separated
|
||||
- leading and trailing '/' will be stripped
|
||||
- if unspecified, path defaults to '',
|
||||
indicating the root path.
|
||||
|
||||
"""
|
||||
|
||||
root_dir = Unicode('/', config=True)
|
||||
|
||||
allow_hidden = Bool(False, config=True, help="Allow access to hidden files")
|
||||
|
||||
notary = Instance(sign.NotebookNotary)
|
||||
def _notary_default(self):
    """Default notary: a NotebookNotary parented to this manager."""
    notary = sign.NotebookNotary(parent=self)
    return notary
|
||||
|
||||
hide_globs = List(Unicode(), [
|
||||
'__pycache__', '*.pyc', '*.pyo',
|
||||
'.DS_Store', '*.so', '*.dylib', '*~',
|
||||
], config=True, help="""
|
||||
Glob patterns to hide in file and directory listings.
|
||||
""")
|
||||
|
||||
untitled_notebook = Unicode(_("Untitled"), config=True,
|
||||
help="The base name used when creating untitled notebooks."
|
||||
)
|
||||
|
||||
untitled_file = Unicode("untitled", config=True,
|
||||
help="The base name used when creating untitled files."
|
||||
)
|
||||
|
||||
untitled_directory = Unicode("Untitled Folder", config=True,
|
||||
help="The base name used when creating untitled directories."
|
||||
)
|
||||
|
||||
pre_save_hook = Any(None, config=True, allow_none=True,
|
||||
help="""Python callable or importstring thereof
|
||||
|
||||
To be called on a contents model prior to save.
|
||||
|
||||
This can be used to process the structure,
|
||||
such as removing notebook outputs or other side effects that
|
||||
should not be saved.
|
||||
|
||||
It will be called as (all arguments passed by keyword)::
|
||||
|
||||
hook(path=path, model=model, contents_manager=self)
|
||||
|
||||
- model: the model to be saved. Includes file contents.
|
||||
Modifying this dict will affect the file that is stored.
|
||||
- path: the API path of the save destination
|
||||
- contents_manager: this ContentsManager instance
|
||||
"""
|
||||
)
|
||||
|
||||
@validate('pre_save_hook')
def _validate_pre_save_hook(self, proposal):
    """Validate/coerce a proposed ``pre_save_hook`` value.

    Accepts either a callable or an import string (dotted path); import
    strings are resolved to the object they name.

    Raises
    ------
    TraitError
        If the resolved value is not callable.
    """
    value = proposal['value']
    if isinstance(value, string_types):
        # Resolve the *proposed* value, not self.pre_save_hook: during
        # cross-validation the trait has not been assigned yet, so reading
        # the attribute would resolve the previous value instead of the
        # one being set.
        value = import_item(value)
    if not callable(value):
        raise TraitError("pre_save_hook must be callable")
    return value
|
||||
|
||||
def run_pre_save_hook(self, model, path, **kwargs):
    """Run the pre-save hook if defined, and log errors"""
    if not self.pre_save_hook:
        return
    try:
        self.log.debug("Running pre-save hook on %s", path)
        self.pre_save_hook(model=model, path=path, contents_manager=self, **kwargs)
    except Exception:
        # Hook failures are logged, never propagated: a broken hook must
        # not prevent the save itself.
        self.log.error("Pre-save hook failed on %s", path, exc_info=True)
|
||||
|
||||
checkpoints_class = Type(Checkpoints, config=True)
|
||||
checkpoints = Instance(Checkpoints, config=True)
|
||||
checkpoints_kwargs = Dict(config=True)
|
||||
|
||||
@default('checkpoints')
def _default_checkpoints(self):
    """Instantiate the configured checkpoints class with its kwargs."""
    kwargs = self.checkpoints_kwargs
    return self.checkpoints_class(**kwargs)
|
||||
|
||||
@default('checkpoints_kwargs')
def _default_checkpoints_kwargs(self):
    """Default kwargs: share this manager as parent and reuse its logger."""
    return {
        'parent': self,
        'log': self.log,
    }
|
||||
|
||||
# Tornado handler serving GET /files/* requests; see get_extra_handlers().
files_handler_class = Type(
    FilesHandler, klass=RequestHandler, allow_none=True, config=True,
    help="""handler class to use when serving raw file requests.

Default is a fallback that talks to the ContentsManager API,
which may be inefficient, especially for large files.

Local files-based ContentsManagers can use a StaticFileHandler subclass,
which will be much more efficient.

Access to these files should be Authenticated.
"""
)

# Keyword arguments forwarded to files_handler_class when it is registered.
files_handler_params = Dict(
    config=True,
    help="""Extra parameters to pass to files_handler_class.

For example, StaticFileHandlers generally expect a `path` argument
specifying the root directory from which to serve files.
"""
)
|
||||
|
||||
def get_extra_handlers(self):
    """Return additional handlers

    Default: self.files_handler_class on /files/.*
    """
    if not self.files_handler_class:
        return []
    return [(r"/files/(.*)", self.files_handler_class, self.files_handler_params)]
|
||||
|
||||
# ContentsManager API part 1: methods that must be
# implemented in subclasses.

def dir_exists(self, path):
    """Does a directory exist at the given path?

    Like os.path.isdir

    Override this method in subclasses.

    Parameters
    ----------
    path : string
        The path to check

    Returns
    -------
    exists : bool
        Whether the path does indeed exist.
    """
    # Consistency: sibling abstract methods raise with this message.
    raise NotImplementedError('must be implemented in a subclass')
|
||||
|
||||
def is_hidden(self, path):
    """Is path a hidden directory or file?

    Override this method in subclasses.

    Parameters
    ----------
    path : string
        The path to check. This is an API path (`/` separated,
        relative to root dir).

    Returns
    -------
    hidden : bool
        Whether the path is hidden.
    """
    # Consistency: sibling abstract methods raise with this message.
    raise NotImplementedError('must be implemented in a subclass')
|
||||
|
||||
def file_exists(self, path=''):
    """Does a file exist at the given path?

    Like os.path.isfile

    Override this method in subclasses.

    Parameters
    ----------
    path : string
        The API path of a file to check for.

    Returns
    -------
    exists : bool
        Whether the file exists.
    """
    raise NotImplementedError('must be implemented in a subclass')
|
||||
|
||||
def exists(self, path):
    """Does a file or directory exist at the given path?

    Like os.path.exists

    Parameters
    ----------
    path : string
        The API path of a file or directory to check for.

    Returns
    -------
    exists : bool
        Whether the target exists.
    """
    found_as_file = self.file_exists(path)
    return found_as_file or self.dir_exists(path)
|
||||
|
||||
def get(self, path, content=True, type=None, format=None):
    """Get a file or directory model."""
    raise NotImplementedError('must be implemented in a subclass')

def save(self, model, path):
    """
    Save a file or directory model to path.

    Should return the saved model with no content. Save implementations
    should call self.run_pre_save_hook(model=model, path=path) prior to
    writing any data.
    """
    raise NotImplementedError('must be implemented in a subclass')

def delete_file(self, path):
    """Delete the file or directory at path."""
    raise NotImplementedError('must be implemented in a subclass')

def rename_file(self, old_path, new_path):
    """Rename a file or directory."""
    raise NotImplementedError('must be implemented in a subclass')
|
||||
|
||||
# ContentsManager API part 2: methods that have useable default
# implementations, but can be overridden in subclasses.

def delete(self, path):
    """Delete a file/directory and any associated checkpoints."""
    stripped = path.strip('/')
    if stripped == '':
        # Refuse to wipe the whole contents root.
        raise HTTPError(400, "Can't delete root")
    self.delete_file(stripped)
    self.checkpoints.delete_all_checkpoints(stripped)
|
||||
|
||||
def rename(self, old_path, new_path):
    """Rename a file and any checkpoints associated with that file."""
    # Move the file first; only then retarget its checkpoints.
    self.rename_file(old_path, new_path)
    self.checkpoints.rename_all_checkpoints(old_path, new_path)
|
||||
|
||||
def update(self, model, path):
    """Update the file's path

    For use in PATCH requests, to enable renaming a file without
    re-uploading its contents. Only used for renaming at the moment.
    """
    current = path.strip('/')
    target = model.get('path', current).strip('/')
    if current == target:
        # No rename requested; hand the caller's model straight back.
        return model
    self.rename(current, target)
    return self.get(target, content=False)
|
||||
|
||||
def info_string(self):
    """One-line description of what this manager serves (used by log_info)."""
    return "Serving contents"
|
||||
|
||||
def get_kernel_path(self, path, model=None):
    """Return the API path for the kernel

    KernelManagers can turn this value into a filesystem path,
    or ignore it altogether.

    The default value here will start kernels in the directory of the
    notebook server. FileContentsManager overrides this to use the
    directory containing the notebook.
    """
    # '' == the server root directory.
    return ''
|
||||
|
||||
def increment_filename(self, filename, path='', insert=''):
    """Increment a filename until it is unique.

    Parameters
    ----------
    filename : unicode
        The name of a file, including extension
    path : unicode
        The API path of the target's directory
    insert: unicode
        The characters to insert after the base filename

    Returns
    -------
    name : unicode
        A filename that is unique, based on the input filename.
    """
    path = path.strip('/')
    # Split off the suffix: notebooks keep only the final '.ipynb';
    # everything else treats the first dot as the start of a possibly
    # multi-part extension (e.g. '.tar.gz').
    stem, sep, tail = filename.rpartition('.')
    if tail != 'ipynb':
        stem, sep, tail = filename.partition('.')
    suffix = sep + tail

    counter = 0
    while True:
        marker = f'{insert}{counter}' if counter else ''
        candidate = f'{stem}{marker}{suffix}'
        if not self.exists(f'{path}/{candidate}'):
            return candidate
        counter += 1
|
||||
|
||||
def validate_notebook_model(self, model):
    """Add failed-validation message to model"""
    try:
        validate_nb(model['content'])
    except ValidationError as e:
        # Surface validation problems to the client instead of failing the
        # request; default= guards against unserializable instance data.
        model['message'] = f'Notebook validation failed: {e.message}:\n' \
            f'{json.dumps(e.instance, indent=1, default=lambda obj: "<UNKNOWN>")}'
    return model
|
||||
|
||||
def new_untitled(self, path='', type='', ext=''):
    """Create a new untitled file or directory in path

    path must be a directory

    File extension can be specified.

    Use `new` to create files with a fully specified path (including filename).
    """
    path = path.strip('/')
    if not self.dir_exists(path):
        raise HTTPError(404, f'No such directory: {path}')

    model = {}
    if type:
        model['type'] = type
    # No explicit type: infer notebook vs plain file from the extension.
    model.setdefault('type', 'notebook' if ext == '.ipynb' else 'file')

    insert = ''
    kind = model['type']
    if kind == 'directory':
        untitled = self.untitled_directory
        insert = ' '  # "Untitled Folder 1", not "Untitled Folder1"
    elif kind == 'notebook':
        untitled = self.untitled_notebook
        ext = '.ipynb'
    elif kind == 'file':
        untitled = self.untitled_file
    else:
        raise HTTPError(400, f"Unexpected model type: {kind!r}")

    name = self.increment_filename(untitled + ext, path, insert=insert)
    return self.new(model, f'{path}/{name}')
|
||||
|
||||
def new(self, model=None, path=''):
    """Create a new file or directory and return its model with no content.

    To create a new untitled entity in a directory, use `new_untitled`.
    """
    path = path.strip('/')
    model = {} if model is None else model

    # Missing type: infer from the target filename.
    default_type = 'notebook' if path.endswith('.ipynb') else 'file'
    model.setdefault('type', default_type)

    # Fill in an empty body for new files (directories carry no content).
    if model['type'] != 'directory' and 'content' not in model:
        if model['type'] == 'notebook':
            model['content'] = new_notebook()
            model['format'] = 'json'
        else:
            model['content'] = ''
            model['type'] = 'file'
            model['format'] = 'text'

    return self.save(model, path)
|
||||
|
||||
def copy(self, from_path, to_path=None):
    """Copy an existing file and return its new model.

    If to_path not specified, it will be the parent directory of from_path.
    If to_path is a directory, filename will increment `from_path-Copy#.ext`.
    Considering multi-part extensions, the Copy# part will be placed before the first dot for all the extensions except `ipynb`.
    For easier manual searching in case of notebooks, the Copy# part will be placed before the last dot.

    from_path must be a full path to a file.
    """
    path = from_path.strip('/')
    if to_path is not None:
        to_path = to_path.strip('/')

    # Split the source into (directory, filename).
    if '/' in path:
        from_dir, from_name = path.rsplit('/', 1)
    else:
        from_dir = ''
        from_name = path

    model = self.get(path)
    # Drop identity fields so save() fills them in for the new location.
    model.pop('path', None)
    model.pop('name', None)
    if model['type'] == 'directory':
        raise HTTPError(400, "Can't copy directories")

    if to_path is None:
        to_path = from_dir
    if self.dir_exists(to_path):
        # copy_pat (module-level regex) strips a previous "-Copy#" marker
        # so repeated copies don't stack suffixes.
        name = copy_pat.sub('.', from_name)
        to_name = self.increment_filename(name, to_path, insert='-Copy')
        to_path = f'{to_path}/{to_name}'

    model = self.save(model, to_path)
    return model
|
||||
|
||||
def log_info(self):
    """Log this manager's info string at INFO level."""
    self.log.info(self.info_string())
|
||||
|
||||
def trust_notebook(self, path):
    """Explicitly trust a notebook

    Parameters
    ----------
    path : string
        The path of a notebook
    """
    model = self.get(path)
    notebook = model['content']
    self.log.warning("Trusting notebook %s", path)
    # Mark every cell trusted, then sign so future loads skip the warning.
    self.notary.mark_cells(notebook, True)
    self.check_and_sign(notebook, path)
|
||||
|
||||
def check_and_sign(self, nb, path=''):
    """Check for trusted cells, and sign the notebook.

    Called as a part of saving notebooks.

    Parameters
    ----------
    nb : dict
        The notebook dict
    path : string
        The notebook's path (for logging)
    """
    if not self.notary.check_cells(nb):
        # Leave untrusted notebooks unsigned so outputs stay sandboxed.
        self.log.warning("Notebook %s is not trusted", path)
        return
    self.notary.sign(nb)
|
||||
|
||||
def mark_trusted_cells(self, nb, path=''):
    """Mark cells as trusted if the notebook signature matches.

    Called as a part of loading notebooks.

    Parameters
    ----------
    nb : dict
        The notebook object (in current nbformat)
    path : string
        The notebook's path (for logging)
    """
    is_trusted = self.notary.check_signature(nb)
    if not is_trusted:
        self.log.warning("Notebook %s is not trusted", path)
    # Propagate the verdict onto every cell, trusted or not.
    self.notary.mark_cells(nb, is_trusted)
|
||||
|
||||
def should_list(self, name):
    """Should this file/directory name be displayed in a listing?"""
    # Hidden iff the name matches any configured hide glob.
    for glob in self.hide_globs:
        if fnmatch(name, glob):
            return False
    return True
|
||||
|
||||
# Part 3: Checkpoints API — thin delegation to self.checkpoints.

def create_checkpoint(self, path):
    """Create a checkpoint of the file at *path*."""
    return self.checkpoints.create_checkpoint(self, path)

def restore_checkpoint(self, checkpoint_id, path):
    """Restore *path* to the state saved as *checkpoint_id*."""
    self.checkpoints.restore_checkpoint(self, checkpoint_id, path)

def list_checkpoints(self, path):
    """Return the list of checkpoints recorded for *path*."""
    return self.checkpoints.list_checkpoints(path)

def delete_checkpoint(self, checkpoint_id, path):
    """Delete one checkpoint of *path*."""
    return self.checkpoints.delete_checkpoint(checkpoint_id, path)
|
@ -0,0 +1,734 @@
|
||||
"""Test the contents webservice API."""
|
||||
|
||||
from contextlib import contextmanager
|
||||
from functools import partial
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from unicodedata import normalize
|
||||
|
||||
pjoin = os.path.join
|
||||
|
||||
import requests
|
||||
from send2trash import send2trash
|
||||
from send2trash.exceptions import TrashPermissionError
|
||||
|
||||
from ..filecheckpoints import GenericFileCheckpoints
|
||||
|
||||
from traitlets.config import Config
|
||||
from notebook.utils import url_path_join, url_escape, to_os_path
|
||||
from notebook.tests.launchnotebook import NotebookTestBase, assert_http_error
|
||||
from nbformat import write, from_dict
|
||||
from nbformat.v4 import (
|
||||
new_notebook, new_markdown_cell,
|
||||
)
|
||||
from nbformat import v2
|
||||
from ipython_genutils import py3compat
|
||||
from ipython_genutils.tempdir import TemporaryDirectory
|
||||
|
||||
from base64 import encodebytes, decodebytes
|
||||
|
||||
|
||||
def uniq_stable(elems):
    """uniq_stable(elems) -> list

    Return from an iterable, a list of all the unique elements in the input,
    maintaining the order in which they first appear.
    """
    seen = set()
    unique = []
    for item in elems:
        if item not in seen:
            seen.add(item)
            unique.append(item)
    return unique
|
||||
|
||||
def notebooks_only(dir_model):
    """Return only the notebook entries from a directory model's content."""
    return [entry for entry in dir_model['content'] if entry['type'] == 'notebook']
|
||||
|
||||
def dirs_only(dir_model):
    """Return only the directory entries from a directory model's content."""
    return [entry for entry in dir_model['content'] if entry['type'] == 'directory']
|
||||
|
||||
|
||||
class API:
    """Wrapper for contents API calls."""
    def __init__(self, request):
        # `request` is a callable issuing HTTP requests: (verb, url, **kwargs).
        self.request = request

    def _req(self, verb, path, body=None, params=None):
        # Issue a request against api/contents and fail fast on HTTP errors.
        response = self.request(verb,
                url_path_join('api/contents', path),
                data=body, params=params,
        )
        response.raise_for_status()
        return response

    def list(self, path='/'):
        return self._req('GET', path)

    def read(self, path, type=None, format=None, content=None):
        # Only forward query parameters the caller actually set.
        params = {}
        if type is not None:
            params['type'] = type
        if format is not None:
            params['format'] = format
        if content == False:
            params['content'] = '0'
        return self._req('GET', path, params=params)

    def create_untitled(self, path='/', ext='.ipynb'):
        body = None
        if ext:
            body = json.dumps({'ext': ext})
        return self._req('POST', path, body)

    def mkdir_untitled(self, path='/'):
        return self._req('POST', path, json.dumps({'type': 'directory'}))

    def copy(self, copy_from, path='/'):
        body = json.dumps({'copy_from':copy_from})
        return self._req('POST', path, body)

    def create(self, path='/'):
        return self._req('PUT', path)

    def upload(self, path, body):
        return self._req('PUT', path, body)

    def mkdir(self, path='/'):
        return self._req('PUT', path, json.dumps({'type': 'directory'}))

    def copy_put(self, copy_from, path='/'):
        # Deliberately invalid: copy_from is only accepted on POST (see tests).
        body = json.dumps({'copy_from':copy_from})
        return self._req('PUT', path, body)

    def save(self, path, body):
        return self._req('PUT', path, body)

    def delete(self, path='/'):
        return self._req('DELETE', path)

    def rename(self, path, new_path):
        body = json.dumps({'path': new_path})
        return self._req('PATCH', path, body)

    def get_checkpoints(self, path):
        return self._req('GET', url_path_join(path, 'checkpoints'))

    def new_checkpoint(self, path):
        return self._req('POST', url_path_join(path, 'checkpoints'))

    def restore_checkpoint(self, path, checkpoint_id):
        return self._req('POST', url_path_join(path, 'checkpoints', checkpoint_id))

    def delete_checkpoint(self, path, checkpoint_id):
        return self._req('DELETE', url_path_join(path, 'checkpoints', checkpoint_id))
|
||||
|
||||
class APITest(NotebookTestBase):
    """Test the contents web service API"""  # fixed: previously said "kernels", but this suite exercises api/contents
    # (directory, base name) pairs; setUp creates a .ipynb, .txt and .blob
    # fixture for each pair.
    dirs_nbs = [('', 'inroot'),
                ('Directory with spaces in', 'inspace'),
                ('unicodé', 'innonascii'),
                ('foo', 'a'),
                ('foo', 'b'),
                ('foo', 'name with spaces'),
                ('foo', 'unicodé'),
                ('foo/bar', 'baz'),
                ('ordering', 'A'),
                ('ordering', 'b'),
                ('ordering', 'C'),
                ('å b', 'ç d'),
               ]
    # Directories that must be excluded from listings.
    hidden_dirs = ['.hidden', '__pycache__']

    # Don't include root dir.
    dirs = uniq_stable([py3compat.cast_unicode(d) for (d,n) in dirs_nbs[1:]])
    # NFC-normalized so composed/decomposed unicode filenames compare equal.
    top_level_dirs = {normalize('NFC', d.split('/')[0]) for d in dirs}
|
||||
|
||||
@staticmethod
|
||||
def _blob_for_name(name):
|
||||
return name.encode('utf-8') + b'\xFF'
|
||||
|
||||
@staticmethod
|
||||
def _txt_for_name(name):
|
||||
return f'{name} text file'
|
||||
|
||||
def to_os_path(self, api_path):
    # Map an API path onto the server's on-disk notebook directory.
    return to_os_path(api_path, root=self.notebook_dir)

def make_dir(self, api_path):
    """Create a directory at api_path"""
    os_path = self.to_os_path(api_path)
    try:
        os.makedirs(os_path)
    except OSError:
        # Best-effort: fixture dirs may already exist between tests.
        print(f"Directory already exists: {os_path!r}")

def make_txt(self, api_path, txt):
    """Make a text file at a given api_path"""
    os_path = self.to_os_path(api_path)
    with open(os_path, 'w', encoding='utf-8') as f:
        f.write(txt)

def make_blob(self, api_path, blob):
    """Make a binary file at a given api_path"""
    os_path = self.to_os_path(api_path)
    with open(os_path, 'wb') as f:
        f.write(blob)
|
||||
|
||||
def make_nb(self, api_path, nb):
    """Make a notebook file at a given api_path"""
    os_path = self.to_os_path(api_path)

    with open(os_path, 'w', encoding='utf-8') as f:
        # Always serialize fixtures as nbformat v4.
        write(nb, f, version=4)

def delete_dir(self, api_path):
    """Delete a directory at api_path, removing any contents."""
    os_path = self.to_os_path(api_path)
    shutil.rmtree(os_path, ignore_errors=True)

def delete_file(self, api_path):
    """Delete a file at the given path if it exists."""
    if self.isfile(api_path):
        os.unlink(self.to_os_path(api_path))

def isfile(self, api_path):
    # True if api_path maps to a regular file on disk.
    return os.path.isfile(self.to_os_path(api_path))
|
||||
|
||||
def isdir(self, api_path):
    """Return True if api_path maps to a directory on disk."""
    return os.path.isdir(self.to_os_path(api_path))

def can_send2trash(self, api_path):
    """Send a path to trash, if possible. Return success."""
    try:
        send2trash(self.to_os_path(api_path))
        return True
    except TrashPermissionError:
        # Fix: the exception instance was bound (`as e`) but never used.
        return False
|
||||
|
||||
def setUp(self):
    # Build the fixture tree: every dir, plus a notebook, a text file and a
    # binary blob per (dir, name) pair; register cleanups for all of them.
    for d in (self.dirs + self.hidden_dirs):
        self.make_dir(d)
        self.addCleanup(partial(self.delete_dir, d))

    for d, name in self.dirs_nbs:
        # create a notebook
        nb = new_notebook()
        nbname = f'{d}/{name}.ipynb'
        self.make_nb(nbname, nb)
        self.addCleanup(partial(self.delete_file, nbname))

        # create a text file
        txt = self._txt_for_name(name)
        txtname = f'{d}/{name}.txt'
        self.make_txt(txtname, txt)
        self.addCleanup(partial(self.delete_file, txtname))

        # create a binary file
        blob = self._blob_for_name(name)
        blobname = f'{d}/{name}.blob'
        self.make_blob(blobname, blob)
        self.addCleanup(partial(self.delete_file, blobname))

    self.api = API(self.request)
|
||||
|
||||
def test_list_notebooks(self):
    # Root dir contains exactly one notebook.
    nbs = notebooks_only(self.api.list().json())
    self.assertEqual(len(nbs), 1)
    self.assertEqual(nbs[0]['name'], 'inroot.ipynb')

    nbs = notebooks_only(self.api.list('/Directory with spaces in/').json())
    self.assertEqual(len(nbs), 1)
    self.assertEqual(nbs[0]['name'], 'inspace.ipynb')

    nbs = notebooks_only(self.api.list('/unicodé/').json())
    self.assertEqual(len(nbs), 1)
    self.assertEqual(nbs[0]['name'], 'innonascii.ipynb')
    self.assertEqual(nbs[0]['path'], 'unicodé/innonascii.ipynb')

    nbs = notebooks_only(self.api.list('/foo/bar/').json())
    self.assertEqual(len(nbs), 1)
    self.assertEqual(nbs[0]['name'], 'baz.ipynb')
    self.assertEqual(nbs[0]['path'], 'foo/bar/baz.ipynb')

    nbs = notebooks_only(self.api.list('foo').json())
    self.assertEqual(len(nbs), 4)
    # Compare NFC-normalized names so composed/decomposed unicode match.
    nbnames = { normalize('NFC', n['name']) for n in nbs }
    expected = [ 'a.ipynb', 'b.ipynb', 'name with spaces.ipynb', 'unicodé.ipynb']
    expected = { normalize('NFC', name) for name in expected }
    self.assertEqual(nbnames, expected)

    nbs = notebooks_only(self.api.list('ordering').json())
    nbnames = {n['name'] for n in nbs}
    expected = {'A.ipynb', 'b.ipynb', 'C.ipynb'}
    self.assertEqual(nbnames, expected)

def test_list_dirs(self):
    dirs = dirs_only(self.api.list().json())
    dir_names = {normalize('NFC', d['name']) for d in dirs}
    self.assertEqual(dir_names, self.top_level_dirs)  # Excluding hidden dirs

def test_get_dir_no_content(self):
    # content=False must still include a 'content' key, set to None.
    for d in self.dirs:
        model = self.api.read(d, content=False).json()
        self.assertEqual(model['path'], d)
        self.assertEqual(model['type'], 'directory')
        self.assertIn('content', model)
        self.assertEqual(model['content'], None)

def test_list_nonexistant_dir(self):
    with assert_http_error(404):
        self.api.list('nonexistant')
|
||||
|
||||
def test_get_nb_contents(self):
    for d, name in self.dirs_nbs:
        path = url_path_join(d, name + '.ipynb')
        nb = self.api.read(path).json()
        self.assertEqual(nb['name'], f'{name}.ipynb')
        self.assertEqual(nb['path'], path)
        self.assertEqual(nb['type'], 'notebook')
        self.assertIn('content', nb)
        self.assertEqual(nb['format'], 'json')
        self.assertIn('metadata', nb['content'])
        self.assertIsInstance(nb['content']['metadata'], dict)

def test_get_nb_no_content(self):
    # content=False returns the model shell with content set to None.
    for d, name in self.dirs_nbs:
        path = url_path_join(d, name + '.ipynb')
        nb = self.api.read(path, content=False).json()
        self.assertEqual(nb['name'], f'{name}.ipynb')
        self.assertEqual(nb['path'], path)
        self.assertEqual(nb['type'], 'notebook')
        self.assertIn('content', nb)
        self.assertEqual(nb['content'], None)

def test_get_nb_invalid(self):
    # A schema-invalid notebook is still returned, with the validation
    # failure reported through model['message'].
    nb = {
        'nbformat': 4,
        'metadata': {},
        'cells': [{
            'cell_type': 'wrong',
            'metadata': {},
        }],
    }
    path = 'å b/Validate tést.ipynb'
    self.make_txt(path, py3compat.cast_unicode(json.dumps(nb)))
    model = self.api.read(path).json()
    self.assertEqual(model['path'], path)
    self.assertEqual(model['type'], 'notebook')
    self.assertIn('content', model)
    self.assertIn('message', model)
    self.assertIn("validation failed", model['message'].lower())
|
||||
|
||||
def test_get_contents_no_such_file(self):
    # Name that doesn't exist - should be a 404
    with assert_http_error(404):
        self.api.read('foo/q.ipynb')

def test_get_text_file_contents(self):
    for d, name in self.dirs_nbs:
        path = url_path_join(d, name + '.txt')
        model = self.api.read(path).json()
        self.assertEqual(model['name'], f'{name}.txt')
        self.assertEqual(model['path'], path)
        self.assertIn('content', model)
        self.assertEqual(model['format'], 'text')
        self.assertEqual(model['type'], 'file')
        self.assertEqual(model['content'], self._txt_for_name(name))

    # Name that doesn't exist - should be a 404
    with assert_http_error(404):
        self.api.read('foo/q.txt')

    # Specifying format=text should fail on a non-UTF-8 file
    with assert_http_error(400):
        self.api.read('foo/bar/baz.blob', type='file', format='text')

def test_get_binary_file_contents(self):
    for d, name in self.dirs_nbs:
        path = url_path_join(d, name + '.blob')
        model = self.api.read(path).json()
        self.assertEqual(model['name'], f'{name}.blob')
        self.assertEqual(model['path'], path)
        self.assertIn('content', model)
        self.assertEqual(model['format'], 'base64')
        self.assertEqual(model['type'], 'file')
        # Binary payloads round-trip through base64.
        self.assertEqual(
            decodebytes(model['content'].encode('ascii')),
            self._blob_for_name(name),
        )

    # Name that doesn't exist - should be a 404
    with assert_http_error(404):
        self.api.read('foo/q.txt')

def test_get_bad_type(self):
    with assert_http_error(400):
        self.api.read('unicodé', type='file')  # this is a directory

    with assert_http_error(400):
        self.api.read('unicodé/innonascii.ipynb', type='directory')
|
||||
|
||||
def _check_created(self, resp, path, type='notebook'):
    """Shared assertions for any 201-Created response from the contents API."""
    self.assertEqual(resp.status_code, 201)
    location_header = py3compat.str_to_unicode(resp.headers['Location'])
    self.assertEqual(location_header, url_path_join(self.url_prefix, 'api/contents', url_escape(path)))
    rjson = resp.json()
    self.assertEqual(rjson['name'], path.rsplit('/', 1)[-1])
    self.assertEqual(rjson['path'], path)
    self.assertEqual(rjson['type'], type)
    # The entity must also actually exist on disk, as the right kind.
    isright = self.isdir if type == 'directory' else self.isfile
    assert isright(path)
|
||||
|
||||
def test_create_untitled(self):
    resp = self.api.create_untitled(path='å b')
    self._check_created(resp, 'å b/Untitled.ipynb')

    # Second time
    resp = self.api.create_untitled(path='å b')
    self._check_created(resp, 'å b/Untitled1.ipynb')

    # And two directories down
    resp = self.api.create_untitled(path='foo/bar')
    self._check_created(resp, 'foo/bar/Untitled.ipynb')

def test_create_untitled_txt(self):
    resp = self.api.create_untitled(path='foo/bar', ext='.txt')
    self._check_created(resp, 'foo/bar/untitled.txt', type='file')

    # New text files start out empty.
    resp = self.api.read(path='foo/bar/untitled.txt')
    model = resp.json()
    self.assertEqual(model['type'], 'file')
    self.assertEqual(model['format'], 'text')
    self.assertEqual(model['content'], '')

def test_upload(self):
    nb = new_notebook()
    nbmodel = {'content': nb, 'type': 'notebook'}
    path = 'å b/Upload tést.ipynb'
    resp = self.api.upload(path, body=json.dumps(nbmodel))
    self._check_created(resp, path)

def test_mkdir_untitled(self):
    resp = self.api.mkdir_untitled(path='å b')
    self._check_created(resp, 'å b/Untitled Folder', type='directory')

    # Second time — note the space before the counter for directories.
    resp = self.api.mkdir_untitled(path='å b')
    self._check_created(resp, 'å b/Untitled Folder 1', type='directory')

    # And two directories down
    resp = self.api.mkdir_untitled(path='foo/bar')
    self._check_created(resp, 'foo/bar/Untitled Folder', type='directory')

def test_mkdir(self):
    path = 'å b/New ∂ir'
    resp = self.api.mkdir(path)
    self._check_created(resp, path, type='directory')

def test_mkdir_hidden_400(self):
    # Creating hidden directories through the API is forbidden.
    with assert_http_error(400):
        resp = self.api.mkdir('å b/.hidden')
|
||||
|
||||
def test_upload_txt(self):
    body = 'ünicode téxt'
    model = {
        'content' : body,
        'format' : 'text',
        'type' : 'file',
    }
    path = 'å b/Upload tést.txt'
    resp = self.api.upload(path, body=json.dumps(model))

    # check roundtrip
    resp = self.api.read(path)
    model = resp.json()
    self.assertEqual(model['type'], 'file')
    self.assertEqual(model['format'], 'text')
    self.assertEqual(model['content'], body)

def test_upload_b64(self):
    body = b'\xFFblob'
    b64body = encodebytes(body).decode('ascii')
    model = {
        'content' : b64body,
        'format' : 'base64',
        'type' : 'file',
    }
    path = 'å b/Upload tést.blob'
    resp = self.api.upload(path, body=json.dumps(model))

    # check roundtrip
    resp = self.api.read(path)
    model = resp.json()
    self.assertEqual(model['type'], 'file')
    self.assertEqual(model['path'], path)
    self.assertEqual(model['format'], 'base64')
    decoded = decodebytes(model['content'].encode('ascii'))
    self.assertEqual(decoded, body)

def test_upload_v2(self):
    # Uploading a v2 notebook should transparently upconvert to v4.
    nb = v2.new_notebook()
    ws = v2.new_worksheet()
    nb.worksheets.append(ws)
    ws.cells.append(v2.new_code_cell(input='print("hi")'))
    nbmodel = {'content': nb, 'type': 'notebook'}
    path = 'å b/Upload tést.ipynb'
    resp = self.api.upload(path, body=json.dumps(nbmodel))
    self._check_created(resp, path)
    resp = self.api.read(path)
    data = resp.json()
    self.assertEqual(data['content']['nbformat'], 4)
|
||||
|
||||
def test_copy(self):
    resp = self.api.copy('å b/ç d.ipynb', 'å b')
    self._check_created(resp, 'å b/ç d-Copy1.ipynb')

    resp = self.api.copy('å b/ç d.ipynb', 'å b')
    self._check_created(resp, 'å b/ç d-Copy2.ipynb')

def test_copy_copy(self):
    # Copying a copy strips the old -Copy1 marker instead of stacking them.
    resp = self.api.copy('å b/ç d.ipynb', 'å b')
    self._check_created(resp, 'å b/ç d-Copy1.ipynb')

    resp = self.api.copy('å b/ç d-Copy1.ipynb', 'å b')
    self._check_created(resp, 'å b/ç d-Copy2.ipynb')

def test_copy_path(self):
    resp = self.api.copy('foo/a.ipynb', 'å b')
    self._check_created(resp, 'å b/a.ipynb')

    resp = self.api.copy('foo/a.ipynb', 'å b')
    self._check_created(resp, 'å b/a-Copy1.ipynb')

def test_copy_put_400(self):
    # copy_from is only valid on POST, not PUT.
    with assert_http_error(400):
        resp = self.api.copy_put('å b/ç d.ipynb', 'å b/cøpy.ipynb')

def test_copy_dir_400(self):
    # can't copy directories
    with assert_http_error(400):
        resp = self.api.copy('å b', 'foo')
|
||||
|
||||
def test_delete(self):
    # Delete every fixture notebook, then verify no listing still shows one.
    for d, name in self.dirs_nbs:
        print(f'{d!r}, {name!r}')
        resp = self.api.delete(url_path_join(d, name + '.ipynb'))
        self.assertEqual(resp.status_code, 204)

    for d in self.dirs + ['/']:
        nbs = notebooks_only(self.api.list(d).json())
        print('------')
        print(d)
        print(nbs)
        self.assertEqual(nbs, [])

def test_delete_dirs(self):
    # depth-first delete everything, so we don't try to delete empty directories
    for name in sorted(self.dirs + ['/'], key=len, reverse=True):
        listing = self.api.list(name).json()['content']
        for model in listing:
            self.api.delete(model['path'])
    listing = self.api.list('/').json()['content']
    self.assertEqual(listing, [])

def test_delete_non_empty_dir(self):
    if sys.platform == 'win32':
        self.skipTest("Disabled deleting non-empty dirs on Windows")
    # Test that non empty directory can be deleted
    try:
        self.api.delete('å b')
    except requests.HTTPError as e:
        # A 400 may just mean the platform can't send to trash; skip then.
        if e.response.status_code == 400:
            if not self.can_send2trash('å b'):
                self.skipTest("Dir can't be sent to trash")
        raise
    # Check if directory has actually been deleted
    with assert_http_error(404):
        self.api.list('å b')
|
||||
|
||||
def test_rename(self):
    """Renaming updates the Location header, the model, and the listing."""
    response = self.api.rename('foo/a.ipynb', 'foo/z.ipynb')
    model = response.json()
    self.assertEqual(response.headers['Location'].split('/')[-1], 'z.ipynb')
    self.assertEqual(model['name'], 'z.ipynb')
    self.assertEqual(model['path'], 'foo/z.ipynb')
    assert self.isfile('foo/z.ipynb')

    # The old name must be gone from the listing and the new one present.
    listing = notebooks_only(self.api.list('foo').json())
    listed_names = {entry['name'] for entry in listing}
    self.assertIn('z.ipynb', listed_names)
    self.assertNotIn('a.ipynb', listed_names)
def test_checkpoints_follow_file(self):
    """Checkpoints must move with a rename and vanish with a delete."""

    # Read initial file state
    orig = self.api.read('foo/a.ipynb')

    # Create a checkpoint of initial state
    r = self.api.new_checkpoint('foo/a.ipynb')
    cp1 = r.json()

    # Modify file and save
    nbcontent = json.loads(orig.text)['content']
    nb = from_dict(nbcontent)
    hcell = new_markdown_cell('Created by test')
    nb.cells.append(hcell)
    nbmodel = {'content': nb, 'type': 'notebook'}
    self.api.save('foo/a.ipynb', body=json.dumps(nbmodel))

    # Rename the file.
    self.api.rename('foo/a.ipynb', 'foo/z.ipynb')

    # Looking for checkpoints in the old location should yield no results.
    self.assertEqual(self.api.get_checkpoints('foo/a.ipynb').json(), [])

    # Looking for checkpoints in the new location should work.
    cps = self.api.get_checkpoints('foo/z.ipynb').json()
    self.assertEqual(cps, [cp1])

    # Delete the file. The checkpoint should be deleted as well.
    self.api.delete('foo/z.ipynb')
    cps = self.api.get_checkpoints('foo/z.ipynb').json()
    self.assertEqual(cps, [])
def test_rename_existing(self):
    """Renaming onto an already-existing file is a conflict (HTTP 409)."""
    with assert_http_error(409):
        self.api.rename('foo/a.ipynb', 'foo/b.ipynb')
def test_save(self):
    """Appending a cell and saving must persist the new cell."""
    marker = 'Created by test ³'

    # Load the current notebook model and append a markdown cell.
    original = self.api.read('foo/a.ipynb')
    notebook = from_dict(json.loads(original.text)['content'])
    notebook.cells.append(new_markdown_cell(marker))

    # Round-trip the modified model through the save API.
    payload = {'content': notebook, 'type': 'notebook'}
    self.api.save('foo/a.ipynb', body=json.dumps(payload))

    # Re-read and confirm the appended cell survived the save.
    reread = from_dict(self.api.read('foo/a.ipynb').json()['content'])
    self.assertEqual(reread.cells[0].source, marker)
def test_checkpoints(self):
|
||||
resp = self.api.read('foo/a.ipynb')
|
||||
r = self.api.new_checkpoint('foo/a.ipynb')
|
||||
self.assertEqual(r.status_code, 201)
|
||||
cp1 = r.json()
|
||||
self.assertEqual(set(cp1), {'id', 'last_modified'})
|
||||
self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id'])
|
||||
|
||||
# Modify it
|
||||
nbcontent = json.loads(resp.text)['content']
|
||||
nb = from_dict(nbcontent)
|
||||
hcell = new_markdown_cell('Created by test')
|
||||
nb.cells.append(hcell)
|
||||
# Save
|
||||
nbmodel= {'content': nb, 'type': 'notebook'}
|
||||
resp = self.api.save('foo/a.ipynb', body=json.dumps(nbmodel))
|
||||
|
||||
# List checkpoints
|
||||
cps = self.api.get_checkpoints('foo/a.ipynb').json()
|
||||
self.assertEqual(cps, [cp1])
|
||||
|
||||
nbcontent = self.api.read('foo/a.ipynb').json()['content']
|
||||
nb = from_dict(nbcontent)
|
||||
self.assertEqual(nb.cells[0].source, 'Created by test')
|
||||
|
||||
# Restore cp1
|
||||
r = self.api.restore_checkpoint('foo/a.ipynb', cp1['id'])
|
||||
self.assertEqual(r.status_code, 204)
|
||||
nbcontent = self.api.read('foo/a.ipynb').json()['content']
|
||||
nb = from_dict(nbcontent)
|
||||
self.assertEqual(nb.cells, [])
|
||||
|
||||
# Delete cp1
|
||||
r = self.api.delete_checkpoint('foo/a.ipynb', cp1['id'])
|
||||
self.assertEqual(r.status_code, 204)
|
||||
cps = self.api.get_checkpoints('foo/a.ipynb').json()
|
||||
self.assertEqual(cps, [])
|
||||
|
||||
def test_file_checkpoints(self):
    """
    Test checkpointing of non-notebook files.
    """
    filename = 'foo/a.txt'
    resp = self.api.read(filename)
    orig_content = json.loads(resp.text)['content']

    # Create a checkpoint.
    r = self.api.new_checkpoint(filename)
    self.assertEqual(r.status_code, 201)
    cp1 = r.json()
    # The checkpoint model exposes exactly these two keys.
    self.assertEqual(set(cp1), {'id', 'last_modified'})
    self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id'])

    # Modify the file and save.
    new_content = orig_content + '\nsecond line'
    model = {
        'content': new_content,
        'type': 'file',
        'format': 'text',
    }
    resp = self.api.save(filename, body=json.dumps(model))

    # List checkpoints
    cps = self.api.get_checkpoints(filename).json()
    self.assertEqual(cps, [cp1])

    # The modification is visible on re-read.
    content = self.api.read(filename).json()['content']
    self.assertEqual(content, new_content)

    # Restore cp1
    r = self.api.restore_checkpoint(filename, cp1['id'])
    self.assertEqual(r.status_code, 204)
    restored_content = self.api.read(filename).json()['content']
    self.assertEqual(restored_content, orig_content)

    # Delete cp1
    r = self.api.delete_checkpoint(filename, cp1['id'])
    self.assertEqual(r.status_code, 204)
    cps = self.api.get_checkpoints(filename).json()
    self.assertEqual(cps, [])
@contextmanager
def patch_cp_root(self, dirname):
    """Temporarily point the checkpoint manager's root dir at *dirname*.

    The previous root_dir is restored on exit, even if the body raises.
    """
    checkpoints = self.notebook.contents_manager.checkpoints
    saved_root = checkpoints.root_dir
    checkpoints.root_dir = dirname
    try:
        yield
    finally:
        checkpoints.root_dir = saved_root
def test_checkpoints_separate_root(self):
    """
    Test that FileCheckpoints functions correctly even when it's
    using a different root dir from FileContentsManager. This also keeps
    the implementation honest for use with ContentsManagers that don't map
    models to the filesystem

    Override this method to a no-op when testing other managers.
    """
    # Re-run both checkpoint suites, each with a fresh checkpoint root
    # that lives outside the contents root.
    for run_checks in (self.test_checkpoints, self.test_file_checkpoints):
        with TemporaryDirectory() as td:
            with self.patch_cp_root(td):
                run_checks()
class GenericFileCheckpointsAPITest(APITest):
    """
    Run the tests from APITest with GenericFileCheckpoints.
    """
    # Swap the checkpoints implementation via traitlets config; the
    # inherited APITest methods then exercise GenericFileCheckpoints.
    config = Config()
    config.FileContentsManager.checkpoints_class = GenericFileCheckpoints

    def test_config_did_something(self):
        """Sanity check: the config above actually took effect."""
        self.assertIsInstance(
            self.notebook.contents_manager.checkpoints,
            GenericFileCheckpoints,
        )
@ -0,0 +1,132 @@
|
||||
"""Tests for file IO"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import os.path
|
||||
import unittest
|
||||
import pytest
|
||||
import stat
|
||||
import sys
|
||||
|
||||
from ..fileio import atomic_writing
|
||||
|
||||
from ipython_genutils.tempdir import TemporaryDirectory
|
||||
|
||||
umask = 0
|
||||
|
||||
def test_atomic_writing():
    """atomic_writing: a failed write must not clobber the file; a successful
    write replaces content while preserving permissions and symlinks."""
    class CustomExc(Exception):
        pass

    with TemporaryDirectory() as td:
        f1 = os.path.join(td, 'penguin')
        with open(f1, 'w') as f:
            f.write('Before')

        if os.name != 'nt':
            # Give the file a distinctive mode so we can later verify that
            # the atomic replace preserved it.
            os.chmod(f1, 0o701)
            orig_mode = stat.S_IMODE(os.stat(f1).st_mode)

        f2 = os.path.join(td, 'flamingo')
        try:
            os.symlink(f1, f2)
            have_symlink = True
        except (AttributeError, NotImplementedError, OSError):
            # AttributeError: Python doesn't support it
            # NotImplementedError: The system doesn't support it
            # OSError: The user lacks the privilege (Windows)
            have_symlink = False

        with pytest.raises(CustomExc):
            with atomic_writing(f1) as f:
                f.write('Failing write')
                raise CustomExc

        # Because of the exception, the file should not have been modified
        with open(f1) as f:
            assert f.read() == 'Before'

        with atomic_writing(f1) as f:
            f.write('Overwritten')

        with open(f1) as f:
            assert f.read() == 'Overwritten'

        if os.name != 'nt':
            # Permissions must survive the replace-by-temp-file dance.
            mode = stat.S_IMODE(os.stat(f1).st_mode)
            assert mode == orig_mode

        if have_symlink:
            # Check that writing over a file preserves a symlink
            with atomic_writing(f2) as f:
                f.write('written from symlink')

            # The write through the symlink must land in the target file.
            with open(f1) as f:
                assert f.read() == 'written from symlink'
class TestWithSetUmask(unittest.TestCase):
    """Tests that manipulate the process umask.

    setUp snapshots the current umask into the module-level ``umask``
    global; tearDown restores it, so each test may change it freely.
    """

    def setUp(self):
        # save umask
        global umask
        # os.umask sets a new mask and returns the previous one, so read
        # the current value by setting 0, then immediately restore it.
        umask = os.umask(0)
        os.umask(umask)

    def tearDown(self):
        # restore umask
        os.umask(umask)

    @pytest.mark.skipif(sys.platform == "win32", reason="do not run on windows")
    def test_atomic_writing_umask(self):
        """atomic_writing must honor the active umask for newly created files."""
        with TemporaryDirectory() as td:
            os.umask(0o022)
            f1 = os.path.join(td, '1')
            with atomic_writing(f1) as f:
                f.write('1')
            mode = stat.S_IMODE(os.stat(f1).st_mode)
            # 0o666 & ~0o022 == 0o644
            assert mode == 0o644

            os.umask(0o057)
            f2 = os.path.join(td, '2')
            with atomic_writing(f2) as f:
                f.write('2')
            mode = stat.S_IMODE(os.stat(f2).st_mode)
            # 0o666 & ~0o057 == 0o620
            assert mode == 0o620
def test_atomic_writing_newlines():
    """atomic_writing should apply the same newline translation rules as open()."""
    with TemporaryDirectory() as td:
        path = os.path.join(td, 'testfile')

        lf = 'a\nb\nc\n'
        plat = lf.replace('\n', os.linesep)
        crlf = lf.replace('\n', '\r\n')

        # test default
        # (plain open translates '\n' to the platform line separator)
        with open(path, 'w') as f:
            f.write(lf)
        with open(path, newline='') as f:
            read = f.read()
        assert read == plat

        # test newline=LF
        with open(path, 'w', newline='\n') as f:
            f.write(lf)
        with open(path, newline='') as f:
            read = f.read()
        assert read == lf

        # test newline=CRLF
        with atomic_writing(path, newline='\r\n') as f:
            f.write(lf)
        with open(path, newline='') as f:
            read = f.read()
        assert read == crlf

        # test newline=no convert
        # (mixed line endings must pass through untranslated)
        text = 'crlf\r\ncr\rlf\n'
        with atomic_writing(path, newline='') as f:
            f.write(text)
        with open(path, newline='') as f:
            read = f.read()
        assert read == text
@ -0,0 +1,113 @@
|
||||
from unittest import TestCase
|
||||
from ipython_genutils.tempdir import TemporaryDirectory
|
||||
from ..largefilemanager import LargeFileManager
|
||||
import os
|
||||
from tornado import web
|
||||
|
||||
|
||||
def _make_dir(contents_manager, api_path):
    """
    Make a directory at *api_path*.

    Parameters
    ----------
    contents_manager : ContentsManager
        Manager used to resolve *api_path* to a filesystem path.
    api_path : str
        API-style (posix) path of the directory to create.
    """
    os_path = contents_manager._get_os_path(api_path)
    try:
        os.makedirs(os_path)
    except FileExistsError:
        # Only swallow the "already exists" case; other OSErrors
        # (e.g. permission denied) were previously mislabeled as
        # "already exists" and silently ignored — let them propagate.
        print(f"Directory already exists: {os_path!r}")
class TestLargeFileManager(TestCase):
    """Tests for LargeFileManager's chunked ("large file") save support."""

    def setUp(self):
        self._temp_dir = TemporaryDirectory()
        self.td = self._temp_dir.name
        self.contents_manager = LargeFileManager(root_dir=self.td)

    def tearDown(self):
        # Clean up the temporary root dir (consistent with the other
        # manager test cases in this package).
        self._temp_dir.cleanup()

    def make_dir(self, api_path):
        """make a subdirectory at api_path

        override in subclasses if contents are not on the filesystem.
        """
        _make_dir(self.contents_manager, api_path)

    def test_save(self):
        """Exercise normal saves, invalid chunked saves, and chunk sequences."""
        cm = self.contents_manager
        # Create a notebook
        model = cm.new_untitled(type='notebook')
        name = model['name']
        path = model['path']

        # Get the model with 'content'
        full_model = cm.get(path)
        # Save the notebook
        model = cm.save(full_model, path)
        assert isinstance(model, dict)
        self.assertIn('name', model)
        self.assertIn('path', model)
        self.assertEqual(model['name'], name)
        self.assertEqual(model['path'], path)

        # Invalid chunked saves must raise. (Previously these were
        # try/except blocks that passed silently when no error was
        # raised; assertRaises makes the expected failure mandatory.)

        # A chunked save without a file type is rejected.
        with self.assertRaises(web.HTTPError) as ctx:
            model = {'name': 'test', 'path': 'test', 'chunk': 1}
            cm.save(model, model['path'])
        self.assertEqual('HTTP 400: Bad Request (No file type provided)',
                         str(ctx.exception))

        # Notebooks cannot be transferred in chunks.
        with self.assertRaises(web.HTTPError) as ctx:
            model = {'name': 'test', 'path': 'test', 'chunk': 1, 'type': 'notebook'}
            cm.save(model, model['path'])
        self.assertEqual('HTTP 400: Bad Request (File type "notebook" is not supported for large file transfer)',
                         str(ctx.exception))

        # A chunked save without content is rejected.
        with self.assertRaises(web.HTTPError) as ctx:
            model = {'name': 'test', 'path': 'test', 'chunk': 1, 'type': 'file'}
            cm.save(model, model['path'])
        self.assertEqual('HTTP 400: Bad Request (No file content provided)',
                         str(ctx.exception))

        # Only 'text' and 'base64' formats are valid for chunked content.
        with self.assertRaises(web.HTTPError) as ctx:
            model = {'name': 'test', 'path': 'test', 'chunk': 2, 'type': 'file',
                     'content': 'test', 'format': 'json'}
            cm.save(model, model['path'])
        self.assertEqual("HTTP 400: Bad Request (Must specify format of file contents as 'text' or 'base64')",
                         str(ctx.exception))

        # Save model for different chunks
        model = {'name': 'test', 'path': 'test', 'type': 'file',
                 'content': 'test==', 'format': 'text'}
        name = model['name']
        path = model['path']
        cm.save(model, path)

        # chunk 1 = first, 2 = intermediate, -1 = last; both formats.
        for chunk in (1, 2, -1):
            for fm in ('text', 'base64'):
                full_model = cm.get(path)
                full_model['chunk'] = chunk
                full_model['format'] = fm
                model_res = cm.save(full_model, path)
                assert isinstance(model_res, dict)

                self.assertIn('name', model_res)
                self.assertIn('path', model_res)
                # The transient 'chunk' key must not leak into the result.
                self.assertNotIn('chunk', model_res)
                self.assertEqual(model_res['name'], name)
                self.assertEqual(model_res['path'], path)

        # Test in sub-directory
        # Create a directory and notebook in that directory
        sub_dir = '/foo/'
        self.make_dir('foo')
        model = cm.new_untitled(path=sub_dir, type='notebook')
        name = model['name']
        path = model['path']
        model = cm.get(path)

        # Saving in a subdirectory keeps name and full api path intact.
        model = cm.save(model, path)
        assert isinstance(model, dict)
        self.assertIn('name', model)
        self.assertIn('path', model)
        self.assertEqual(model['name'], 'Untitled.ipynb')
        self.assertEqual(model['path'], 'foo/Untitled.ipynb')
@ -0,0 +1,658 @@
|
||||
"""Tests for the notebook manager."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from contextlib import contextmanager
|
||||
from itertools import combinations
|
||||
|
||||
from tornado.web import HTTPError
|
||||
from unittest import TestCase, skipIf
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
||||
from nbformat import v4 as nbformat
|
||||
|
||||
from ipython_genutils.tempdir import TemporaryDirectory
|
||||
from traitlets import TraitError
|
||||
|
||||
from ..filemanager import FileContentsManager
|
||||
|
||||
|
||||
def _make_dir(contents_manager, api_path):
    """
    Make a directory at *api_path*.

    Parameters
    ----------
    contents_manager : ContentsManager
        Manager used to resolve *api_path* to a filesystem path.
    api_path : str
        API-style (posix) path of the directory to create.
    """
    os_path = contents_manager._get_os_path(api_path)
    try:
        os.makedirs(os_path)
    except FileExistsError:
        # Only swallow the "already exists" case; other OSErrors
        # (e.g. permission denied) were previously mislabeled as
        # "already exists" and silently ignored — let them propagate.
        print(f"Directory already exists: {os_path!r}")
class TestFileContentsManager(TestCase):
    """Unit tests for FileContentsManager against a real temp filesystem."""

    @contextmanager
    def assertRaisesHTTPError(self, status, msg=None):
        """Assert that the with-block raises HTTPError with *status*."""
        msg = msg or f"Should have raised HTTPError({status})"
        try:
            yield
        except HTTPError as e:
            self.assertEqual(e.status_code, status)
        else:
            # No exception at all means the expectation was not met.
            self.fail(msg)

    def symlink(self, contents_manager, src, dst):
        """Make a symlink to src from dst

        src and dst are api_paths
        """
        src_os_path = contents_manager._get_os_path(src)
        dst_os_path = contents_manager._get_os_path(dst)
        print(src_os_path, dst_os_path, os.path.isfile(src_os_path))
        os.symlink(src_os_path, dst_os_path)

    def test_root_dir(self):
        # root_dir should round-trip through the constructor unchanged.
        with TemporaryDirectory() as td:
            fm = FileContentsManager(root_dir=td)
            self.assertEqual(fm.root_dir, td)

    def test_missing_root_dir(self):
        # A nonexistent root_dir is a configuration error (TraitError).
        with TemporaryDirectory() as td:
            root = os.path.join(td, 'notebook', 'dir', 'is', 'missing')
            self.assertRaises(TraitError, FileContentsManager, root_dir=root)

    def test_invalid_root_dir(self):
        # A regular file (not a directory) is also rejected as root_dir.
        with NamedTemporaryFile() as tf:
            self.assertRaises(TraitError, FileContentsManager, root_dir=tf.name)

    def test_get_os_path(self):
        # full filesystem path should be returned with correct operating system
        # separators.
        with TemporaryDirectory() as td:
            root = td
            fm = FileContentsManager(root_dir=root)
            path = fm._get_os_path('/path/to/notebook/test.ipynb')
            rel_path_list = '/path/to/notebook/test.ipynb'.split('/')
            fs_path = os.path.join(fm.root_dir, *rel_path_list)
            self.assertEqual(path, fs_path)

            fm = FileContentsManager(root_dir=root)
            path = fm._get_os_path('test.ipynb')
            fs_path = os.path.join(fm.root_dir, 'test.ipynb')
            self.assertEqual(path, fs_path)

            # Redundant leading slashes should collapse, not escape the root.
            fm = FileContentsManager(root_dir=root)
            path = fm._get_os_path('////test.ipynb')
            fs_path = os.path.join(fm.root_dir, 'test.ipynb')
            self.assertEqual(path, fs_path)

    def test_checkpoint_subdir(self):
        # Checkpoints for a file in a subdirectory must live in that
        # subdirectory's checkpoint dir, not the root's.
        subd = 'sub ∂ir'
        cp_name = 'test-cp.ipynb'
        with TemporaryDirectory() as td:
            root = td
            os.mkdir(os.path.join(td, subd))
            fm = FileContentsManager(root_dir=root)
            cpm = fm.checkpoints
            cp_dir = cpm.checkpoint_path(
                'cp', 'test.ipynb'
            )
            cp_subdir = cpm.checkpoint_path(
                'cp', f'/{subd}/test.ipynb'
            )
            self.assertNotEqual(cp_dir, cp_subdir)
            self.assertEqual(cp_dir, os.path.join(root, cpm.checkpoint_dir, cp_name))
            self.assertEqual(cp_subdir, os.path.join(root, subd, cpm.checkpoint_dir, cp_name))

    def test_bad_symlink(self):
        # A dangling symlink must not break directory listing.
        with TemporaryDirectory() as td:
            cm = FileContentsManager(root_dir=td)
            path = 'test bad symlink'
            _make_dir(cm, path)

            file_model = cm.new_untitled(path=path, ext='.txt')

            # create a broken symlink
            self.symlink(cm, "target", f'{path}/{"bad symlink"}')
            model = cm.get(path)

            contents = {
                content['name']: content for content in model['content']
            }
            self.assertTrue('untitled.txt' in contents)
            self.assertEqual(contents['untitled.txt'], file_model)
            # broken symlinks should still be shown in the contents manager
            self.assertTrue('bad symlink' in contents)

    @skipIf(sys.platform == 'win32', "will not run on windows")
    def test_recursive_symlink(self):
        # A symlink pointing at itself must not hang or crash listing.
        with TemporaryDirectory() as td:
            cm = FileContentsManager(root_dir=td)
            path = 'test recursive symlink'
            _make_dir(cm, path)
            os_path = cm._get_os_path(path)
            os.symlink("recursive", os.path.join(os_path, "recursive"))
            file_model = cm.new_untitled(path=path, ext='.txt')

            model = cm.get(path)

            contents = {
                content['name']: content for content in model['content']
            }
            self.assertIn('untitled.txt', contents)
            self.assertEqual(contents['untitled.txt'], file_model)
            # recursive symlinks should not be shown in the contents manager
            self.assertNotIn('recursive', contents)

    def test_good_symlink(self):
        # A valid symlink shows up alongside its target in the listing.
        with TemporaryDirectory() as td:
            cm = FileContentsManager(root_dir=td)
            parent = 'test good symlink'
            name = 'good symlink'
            path = f'{parent}/{name}'
            _make_dir(cm, parent)

            file_model = cm.new(path=parent + '/zfoo.txt')

            # create a good symlink
            self.symlink(cm, file_model['path'], path)
            symlink_model = cm.get(path, content=False)
            dir_model = cm.get(parent)
            self.assertEqual(
                sorted(dir_model['content'], key=lambda x: x['name']),
                [symlink_model, file_model],
            )

    @skipIf(hasattr(os, 'getuid') and os.getuid() == 0, "Can't test permissions as root")
    @skipIf(sys.platform.startswith('win'), "Can't test permissions on Windows")
    def test_403(self):
        # Writing to an unwritable file should surface as HTTP 403.
        with TemporaryDirectory() as td:
            cm = FileContentsManager(root_dir=td)
            model = cm.new_untitled(type='file')
            os_path = cm._get_os_path(model['path'])

            # Make the file read-only for its owner.
            os.chmod(os_path, 0o400)
            try:
                with cm.open(os_path, 'w') as f:
                    f.write("don't care")
            except HTTPError as e:
                self.assertEqual(e.status_code, 403)
            else:
                self.fail("Should have raised HTTPError(403)")

    def test_escape_root(self):
        # Paths traversing outside root_dir must 404, never touch the files.
        with TemporaryDirectory() as td:
            cm = FileContentsManager(root_dir=td)
            # make foo, bar next to root
            with open(os.path.join(cm.root_dir, '..', 'foo'), 'w') as f:
                f.write('foo')
            with open(os.path.join(cm.root_dir, '..', 'bar'), 'w') as f:
                f.write('bar')

            with self.assertRaisesHTTPError(404):
                cm.get('..')
            with self.assertRaisesHTTPError(404):
                cm.get('foo/../../../bar')
            with self.assertRaisesHTTPError(404):
                cm.delete('../foo')
            with self.assertRaisesHTTPError(404):
                cm.rename('../foo', '../bar')
            with self.assertRaisesHTTPError(404):
                cm.save(model={
                    'type': 'file',
                    'content': '',
                    'format': 'text',
                }, path='../foo')
class TestContentsManager(TestCase):
|
||||
@contextmanager
|
||||
def assertRaisesHTTPError(self, status, msg=None):
|
||||
msg = msg or f"Should have raised HTTPError({status})"
|
||||
try:
|
||||
yield
|
||||
except HTTPError as e:
|
||||
self.assertEqual(e.status_code, status)
|
||||
else:
|
||||
self.fail(msg)
|
||||
|
||||
def make_populated_dir(self, api_path):
|
||||
cm = self.contents_manager
|
||||
|
||||
self.make_dir(api_path)
|
||||
|
||||
cm.new(path="/".join([api_path, "nb.ipynb"]))
|
||||
cm.new(path="/".join([api_path, "file.txt"]))
|
||||
|
||||
def check_populated_dir_files(self, api_path):
|
||||
dir_model = self.contents_manager.get(api_path)
|
||||
|
||||
self.assertEqual(dir_model['path'], api_path)
|
||||
self.assertEqual(dir_model['type'], "directory")
|
||||
|
||||
for entry in dir_model['content']:
|
||||
if entry['type'] == "directory":
|
||||
continue
|
||||
elif entry['type'] == "file":
|
||||
self.assertEqual(entry['name'], "file.txt")
|
||||
complete_path = "/".join([api_path, "file.txt"])
|
||||
self.assertEqual(entry["path"], complete_path)
|
||||
elif entry['type'] == "notebook":
|
||||
self.assertEqual(entry['name'], "nb.ipynb")
|
||||
complete_path = "/".join([api_path, "nb.ipynb"])
|
||||
self.assertEqual(entry["path"], complete_path)
|
||||
|
||||
def setUp(self):
|
||||
self._temp_dir = TemporaryDirectory()
|
||||
self.td = self._temp_dir.name
|
||||
self.contents_manager = FileContentsManager(
|
||||
root_dir=self.td,
|
||||
)
|
||||
|
||||
def tearDown(self):
|
||||
self._temp_dir.cleanup()
|
||||
|
||||
def make_dir(self, api_path):
|
||||
"""make a subdirectory at api_path
|
||||
|
||||
override in subclasses if contents are not on the filesystem.
|
||||
"""
|
||||
_make_dir(self.contents_manager, api_path)
|
||||
|
||||
def add_code_cell(self, nb):
|
||||
output = nbformat.new_output("display_data", {'application/javascript': "alert('hi');"})
|
||||
cell = nbformat.new_code_cell("print('hi')", outputs=[output])
|
||||
nb.cells.append(cell)
|
||||
|
||||
def new_notebook(self):
|
||||
cm = self.contents_manager
|
||||
model = cm.new_untitled(type='notebook')
|
||||
name = model['name']
|
||||
path = model['path']
|
||||
|
||||
full_model = cm.get(path)
|
||||
nb = full_model['content']
|
||||
nb['metadata']['counter'] = int(1e6 * time.time())
|
||||
self.add_code_cell(nb)
|
||||
|
||||
cm.save(full_model, path)
|
||||
return nb, name, path
|
||||
|
||||
def test_new_untitled(self):
|
||||
cm = self.contents_manager
|
||||
# Test in root directory
|
||||
model = cm.new_untitled(type='notebook')
|
||||
assert isinstance(model, dict)
|
||||
self.assertIn('name', model)
|
||||
self.assertIn('path', model)
|
||||
self.assertIn('type', model)
|
||||
self.assertEqual(model['type'], 'notebook')
|
||||
self.assertEqual(model['name'], 'Untitled.ipynb')
|
||||
self.assertEqual(model['path'], 'Untitled.ipynb')
|
||||
|
||||
# Test in sub-directory
|
||||
model = cm.new_untitled(type='directory')
|
||||
assert isinstance(model, dict)
|
||||
self.assertIn('name', model)
|
||||
self.assertIn('path', model)
|
||||
self.assertIn('type', model)
|
||||
self.assertEqual(model['type'], 'directory')
|
||||
self.assertEqual(model['name'], 'Untitled Folder')
|
||||
self.assertEqual(model['path'], 'Untitled Folder')
|
||||
sub_dir = model['path']
|
||||
|
||||
model = cm.new_untitled(path=sub_dir)
|
||||
assert isinstance(model, dict)
|
||||
self.assertIn('name', model)
|
||||
self.assertIn('path', model)
|
||||
self.assertIn('type', model)
|
||||
self.assertEqual(model['type'], 'file')
|
||||
self.assertEqual(model['name'], 'untitled')
|
||||
self.assertEqual(model['path'], f'{sub_dir}/untitled')
|
||||
|
||||
# Test with a compound extension
|
||||
model = cm.new_untitled(path=sub_dir, ext='.foo.bar')
|
||||
self.assertEqual(model['name'], 'untitled.foo.bar')
|
||||
model = cm.new_untitled(path=sub_dir, ext='.foo.bar')
|
||||
self.assertEqual(model['name'], 'untitled1.foo.bar')
|
||||
|
||||
def test_modified_date(self):
|
||||
|
||||
cm = self.contents_manager
|
||||
|
||||
# Create a new notebook.
|
||||
nb, name, path = self.new_notebook()
|
||||
model = cm.get(path)
|
||||
|
||||
# Add a cell and save.
|
||||
self.add_code_cell(model['content'])
|
||||
cm.save(model, path)
|
||||
|
||||
# Reload notebook and verify that last_modified incremented.
|
||||
saved = cm.get(path)
|
||||
self.assertGreaterEqual(saved['last_modified'], model['last_modified'])
|
||||
|
||||
# Move the notebook and verify that last_modified stayed the same.
|
||||
# (The frontend fires a warning if last_modified increases on the
|
||||
# renamed file.)
|
||||
new_path = 'renamed.ipynb'
|
||||
cm.rename(path, new_path)
|
||||
renamed = cm.get(new_path)
|
||||
self.assertGreaterEqual(
|
||||
renamed['last_modified'],
|
||||
saved['last_modified'],
|
||||
)
|
||||
|
||||
def test_get(self):
|
||||
cm = self.contents_manager
|
||||
# Create a notebook
|
||||
model = cm.new_untitled(type='notebook')
|
||||
name = model['name']
|
||||
path = model['path']
|
||||
|
||||
# Check that we 'get' on the notebook we just created
|
||||
model2 = cm.get(path)
|
||||
assert isinstance(model2, dict)
|
||||
self.assertIn('name', model2)
|
||||
self.assertIn('path', model2)
|
||||
self.assertEqual(model['name'], name)
|
||||
self.assertEqual(model['path'], path)
|
||||
|
||||
nb_as_file = cm.get(path, content=True, type='file')
|
||||
self.assertEqual(nb_as_file['path'], path)
|
||||
self.assertEqual(nb_as_file['type'], 'file')
|
||||
self.assertEqual(nb_as_file['format'], 'text')
|
||||
self.assertNotIsInstance(nb_as_file['content'], dict)
|
||||
|
||||
nb_as_bin_file = cm.get(path, content=True, type='file', format='base64')
|
||||
self.assertEqual(nb_as_bin_file['format'], 'base64')
|
||||
|
||||
# Test in sub-directory
|
||||
sub_dir = '/foo/'
|
||||
self.make_dir('foo')
|
||||
model = cm.new_untitled(path=sub_dir, ext='.ipynb')
|
||||
model2 = cm.get(sub_dir + name)
|
||||
assert isinstance(model2, dict)
|
||||
self.assertIn('name', model2)
|
||||
self.assertIn('path', model2)
|
||||
self.assertIn('content', model2)
|
||||
self.assertEqual(model2['name'], 'Untitled.ipynb')
|
||||
self.assertEqual(model2['path'], f'{sub_dir.strip("/")}/{name}')
|
||||
|
||||
# Test with a regular file.
|
||||
file_model_path = cm.new_untitled(path=sub_dir, ext='.txt')['path']
|
||||
file_model = cm.get(file_model_path)
|
||||
self.assertDictContainsSubset(
|
||||
{
|
||||
'content': '',
|
||||
'format': 'text',
|
||||
'mimetype': 'text/plain',
|
||||
'name': 'untitled.txt',
|
||||
'path': 'foo/untitled.txt',
|
||||
'type': 'file',
|
||||
'writable': True,
|
||||
},
|
||||
file_model,
|
||||
)
|
||||
self.assertIn('created', file_model)
|
||||
self.assertIn('last_modified', file_model)
|
||||
|
||||
# Test getting directory model
|
||||
|
||||
# Create a sub-sub directory to test getting directory contents with a
|
||||
# subdir.
|
||||
self.make_dir('foo/bar')
|
||||
dirmodel = cm.get('foo')
|
||||
self.assertEqual(dirmodel['type'], 'directory')
|
||||
self.assertIsInstance(dirmodel['content'], list)
|
||||
self.assertEqual(len(dirmodel['content']), 3)
|
||||
self.assertEqual(dirmodel['path'], 'foo')
|
||||
self.assertEqual(dirmodel['name'], 'foo')
|
||||
|
||||
# Directory contents should match the contents of each individual entry
|
||||
# when requested with content=False.
|
||||
model2_no_content = cm.get(sub_dir + name, content=False)
|
||||
file_model_no_content = cm.get('foo/untitled.txt', content=False)
|
||||
sub_sub_dir_no_content = cm.get('foo/bar', content=False)
|
||||
self.assertEqual(sub_sub_dir_no_content['path'], 'foo/bar')
|
||||
self.assertEqual(sub_sub_dir_no_content['name'], 'bar')
|
||||
|
||||
for entry in dirmodel['content']:
|
||||
# Order isn't guaranteed by the spec, so this is a hacky way of
|
||||
# verifying that all entries are matched.
|
||||
if entry['path'] == sub_sub_dir_no_content['path']:
|
||||
self.assertEqual(entry, sub_sub_dir_no_content)
|
||||
elif entry['path'] == model2_no_content['path']:
|
||||
self.assertEqual(entry, model2_no_content)
|
||||
elif entry['path'] == file_model_no_content['path']:
|
||||
self.assertEqual(entry, file_model_no_content)
|
||||
else:
|
||||
self.fail(f"Unexpected directory entry: {entry()}")
|
||||
|
||||
with self.assertRaises(HTTPError):
|
||||
cm.get('foo', type='file')
|
||||
|
||||
def test_update(self):
|
||||
cm = self.contents_manager
|
||||
# Create a notebook
|
||||
model = cm.new_untitled(type='notebook')
|
||||
name = model['name']
|
||||
path = model['path']
|
||||
|
||||
# Change the name in the model for rename
|
||||
model['path'] = 'test.ipynb'
|
||||
model = cm.update(model, path)
|
||||
assert isinstance(model, dict)
|
||||
self.assertIn('name', model)
|
||||
self.assertIn('path', model)
|
||||
self.assertEqual(model['name'], 'test.ipynb')
|
||||
|
||||
# Make sure the old name is gone
|
||||
self.assertRaises(HTTPError, cm.get, path)
|
||||
|
||||
# Test in sub-directory
|
||||
# Create a directory and notebook in that directory
|
||||
sub_dir = '/foo/'
|
||||
self.make_dir('foo')
|
||||
model = cm.new_untitled(path=sub_dir, type='notebook')
|
||||
path = model['path']
|
||||
|
||||
# Change the name in the model for rename
|
||||
d = path.rsplit('/', 1)[0]
|
||||
new_path = model['path'] = d + '/test_in_sub.ipynb'
|
||||
model = cm.update(model, path)
|
||||
assert isinstance(model, dict)
|
||||
self.assertIn('name', model)
|
||||
self.assertIn('path', model)
|
||||
self.assertEqual(model['name'], 'test_in_sub.ipynb')
|
||||
self.assertEqual(model['path'], new_path)
|
||||
|
||||
# Make sure the old name is gone
|
||||
self.assertRaises(HTTPError, cm.get, path)
|
||||
|
||||
def test_save(self):
    """Saving a fetched model back through cm.save() round-trips name/path."""
    cm = self.contents_manager

    # New untitled notebook at the root.
    model = cm.new_untitled(type='notebook')
    name, path = model['name'], model['path']

    # Fetch the full model (including 'content') and save it back.
    full_model = cm.get(path)
    saved = cm.save(full_model, path)
    assert isinstance(saved, dict)
    self.assertIn('name', saved)
    self.assertIn('path', saved)
    self.assertEqual(saved['name'], name)
    self.assertEqual(saved['path'], path)

    # Repeat inside a sub-directory.
    sub_dir = '/foo/'
    self.make_dir('foo')
    model = cm.new_untitled(path=sub_dir, type='notebook')
    name = model['name']
    path = model['path']
    model = cm.get(path)

    saved = cm.save(model, path)
    assert isinstance(saved, dict)
    self.assertIn('name', saved)
    self.assertIn('path', saved)
    self.assertEqual(saved['name'], 'Untitled.ipynb')
    self.assertEqual(saved['path'], 'foo/Untitled.ipynb')
|
||||
|
||||
def test_delete(self):
    """Deleting a notebook removes it; further delete/get on the path fail."""
    cm = self.contents_manager
    nb, name, path = self.new_notebook()

    # Remove the notebook.
    cm.delete(path)

    # Deleting it again (now non-existent) must raise.
    self.assertRaises(HTTPError, cm.delete, path)

    # Fetching the deleted notebook must raise as well.
    self.assertRaises(HTTPError, cm.get, path)
|
||||
|
||||
def test_rename(self):
    """Renaming files and directories, including nested-directory moves."""
    cm = self.contents_manager
    nb, name, path = self.new_notebook()

    # Move the notebook to a new path.
    cm.rename(path, "changed_path")

    # The old path no longer resolves...
    self.assertRaises(HTTPError, cm.get, path)
    # ...while the new one does.
    assert isinstance(cm.get("changed_path"), dict)

    # Filename validation: only Windows defines forbidden characters.
    if sys.platform == 'win32' and isinstance(cm, FileContentsManager):
        with self.assertRaisesHTTPError(400):
            cm.rename("changed_path", "prevent: in name")

    # Nested-directory rename scenarios (ported from pgcontents).
    all_dirs = ['foo', 'bar', 'foo/bar', 'foo/bar/foo', 'foo/bar/foo/bar']
    unchanged_dirs, changed_dirs = all_dirs[:2], all_dirs[2:]

    for dirname in all_dirs:
        self.make_populated_dir(dirname)
        self.check_populated_dir_files(dirname)

    # Renaming onto an existing directory must be rejected with 409 Conflict.
    for src, dest in combinations(all_dirs, 2):
        with self.assertRaisesHTTPError(409):
            cm.rename(src, dest)

    # Creating a notebook in a directory that doesn't exist yet fails with 404.
    with self.assertRaisesHTTPError(404):
        cm.new_untitled("foo/bar_diff", ext=".ipynb")

    cm.rename("foo/bar", "foo/bar_diff")

    # Directories outside the renamed subtree are untouched.
    for dirname in unchanged_dirs:
        self.check_populated_dir_files(dirname)

    # Renamed directories are gone under their old names...
    for old_name in changed_dirs:
        with self.assertRaisesHTTPError(404):
            cm.get(old_name)

        # ...and fully populated under their new names.
        new_name = old_name.replace("foo/bar", "foo/bar_diff", 1)
        self.check_populated_dir_files(new_name)

    # Creating a notebook inside the renamed directory now works.
    cm.new_untitled("foo/bar_diff", ext=".ipynb")
|
||||
|
||||
def test_delete_root(self):
    """Deleting the root directory is forbidden and reported as HTTP 400."""
    cm = self.contents_manager
    with self.assertRaises(HTTPError) as caught:
        cm.delete('')
    self.assertEqual(caught.exception.status_code, 400)
|
||||
|
||||
def test_copy(self):
    """Copying with default, explicit, and directory-only destinations.

    Uses non-ASCII names to exercise unicode handling in paths.
    """
    cm = self.contents_manager
    parent = 'å b'
    name = 'nb √.ipynb'
    path = f'{parent}/{name}'
    self.make_dir(parent)

    orig = cm.new(path=path)

    # No destination given: an incremented '-CopyN' name is generated.
    copy = cm.copy(path)
    self.assertEqual(copy['name'], orig['name'].replace('.ipynb', '-Copy1.ipynb'))

    # Explicit destination file name.
    copy2 = cm.copy(path, 'å b/copy 2.ipynb')
    self.assertEqual(copy2['name'], 'copy 2.ipynb')
    self.assertEqual(copy2['path'], 'å b/copy 2.ipynb')

    # Destination directory only: the original file name is kept.
    copy2 = cm.copy(path, '/')
    self.assertEqual(copy2['name'], name)
    self.assertEqual(copy2['path'], name)
|
||||
|
||||
def test_trust_notebook(self):
    """trust_notebook() signs a notebook so its cells pass the notary check."""
    cm = self.contents_manager
    nb, name, path = self.new_notebook()

    # Freshly created notebooks are unsigned.
    untrusted = cm.get(path)['content']
    assert not cm.notary.check_cells(untrusted)

    # After trusting, the same content passes the signature check.
    cm.trust_notebook(path)
    trusted = cm.get(path)['content']
    assert cm.notary.check_cells(trusted)
|
||||
|
||||
def test_mark_trusted_cells(self):
    """mark_trusted_cells() reflects the notebook's trust state on code cells."""
    cm = self.contents_manager
    nb, name, path = self.new_notebook()

    # Unsigned notebook: every code cell is marked untrusted.
    cm.mark_trusted_cells(nb, path)
    assert not any(
        cell.metadata.trusted for cell in nb.cells if cell.cell_type == 'code'
    )

    # After trusting and re-fetching, every code cell carries the trusted flag.
    cm.trust_notebook(path)
    nb = cm.get(path)['content']
    assert all(
        cell.metadata.trusted for cell in nb.cells if cell.cell_type == 'code'
    )
|
||||
|
||||
def test_check_and_sign(self):
    """check_and_sign() only records a signature once the notebook is trusted."""
    cm = self.contents_manager
    nb, name, path = self.new_notebook()

    # Untrusted notebook: check_and_sign must not produce a valid signature.
    cm.mark_trusted_cells(nb, path)
    cm.check_and_sign(nb, path)
    assert not cm.notary.check_signature(nb)

    # Trust the file, re-fetch, and sign again: now the signature is valid.
    cm.trust_notebook(path)
    nb = cm.get(path)['content']
    cm.mark_trusted_cells(nb, path)
    cm.check_and_sign(nb, path)
    assert cm.notary.check_signature(nb)
|
||||
|
||||
|
||||
class TestContentsManagerNoAtomic(TestContentsManager):
    """
    Re-run the full TestContentsManager suite (via inheritance) with
    atomic writing disabled on the FileContentsManager.
    """

    def setUp(self):
        # Fresh temporary root per test; kept on self so it lives until teardown.
        self._temp_dir = TemporaryDirectory()
        self.td = self._temp_dir.name
        manager = FileContentsManager(root_dir=self.td)
        manager.use_atomic_writing = False
        self.contents_manager = manager
|
Reference in New Issue
Block a user