Merge branch 'dev/apiCaching' into devel

# Conflicts:
#	src/octoprint/filemanager/storage.py
#	src/octoprint/static/js/app/viewmodels/files.js
This commit is contained in:
Gina Häußge 2016-09-16 17:04:40 +02:00
commit d2ed80baca
31 changed files with 763 additions and 211 deletions

View file

@ -39,7 +39,8 @@ INSTALL_REQUIRES = [
"awesome-slugify>=1.6.5,<1.7",
"feedparser>=5.2.1,<5.3",
"chainmap>=1.0.2,<1.1",
"future>=0.15,<0.16"
"future>=0.15,<0.16",
"scandir>=1.3,<1.4"
]
# Additional requirements for optional install options

View file

@ -226,6 +226,10 @@ class FileManager(object):
return
del self._storage_managers[type]
@property
def registered_storages(self):
    """Identifiers of all storage destinations currently registered."""
    return [destination for destination in self._storage_managers]
@property
def slicing_enabled(self):
    """Whether the underlying slicing manager reports slicing as enabled."""
    manager = self._slicing_manager
    return manager.slicing_enabled
@ -520,6 +524,9 @@ class FileManager(object):
def path_in_storage(self, destination, path):
return self._storage(destination).path_in_storage(path)
def last_modified(self, destination, path=None, recursive=False):
    """Return the last modification date reported by the storage that is
    registered for ``destination``, delegating ``path``/``recursive`` as-is."""
    storage = self._storage(destination)
    return storage.last_modified(path=path, recursive=recursive)
def _storage(self, destination):
if not destination in self._storage_managers:
raise NoSuchStorage("No storage configured for destination {destination}".format(**locals()))

View file

@ -11,6 +11,11 @@ import os
import pylru
import shutil
try:
from os import scandir, walk
except ImportError:
from scandir import scandir, walk
from octoprint.util import atomic_write
from contextlib import contextmanager
from copy import deepcopy
@ -39,6 +44,20 @@ class StorageInterface(object):
return
yield
def last_modified(self, path=None, recursive=False):
    """
    Get the last modification date of the specified ``path`` or ``path``'s subtree.

    Args:
        path (str or None): Path for which to determine the subtree's last modification date. If left out or
            set to None, defaults to storage root.
        recursive (bool): Whether to determine only the date of the specified ``path`` (False, default) or
            the whole ``path``'s subtree (True).

    Returns: (float) The last modification date of the indicated subtree
    """
    raise NotImplementedError()
def file_in_path(self, path, filepath):
"""
Returns whether the file indicated by ``file`` is inside ``path`` or not.
@ -453,23 +472,40 @@ class LocalFileStorage(StorageInterface):
metadata = self._get_metadata(path)
if not metadata:
metadata = dict()
for entry in os.listdir(path):
if is_hidden_path(entry) or not octoprint.filemanager.valid_file_type(entry):
for entry in scandir(path):
if is_hidden_path(entry.name) or not octoprint.filemanager.valid_file_type(entry.name):
continue
absolute_path = os.path.join(path, entry)
if os.path.isfile(absolute_path):
if not entry in metadata or not isinstance(metadata[entry], dict) or not "analysis" in metadata[entry]:
printer_profile_rels = self.get_link(absolute_path, "printerprofile")
if entry.is_file():
if not entry.name in metadata or not isinstance(metadata[entry.name], dict) or not "analysis" in metadata[entry.name]:
printer_profile_rels = self.get_link(entry.path, "printerprofile")
if printer_profile_rels:
printer_profile_id = printer_profile_rels[0]["id"]
else:
printer_profile_id = None
yield entry, absolute_path, printer_profile_id
elif os.path.isdir(absolute_path):
for sub_entry in self._analysis_backlog_generator(absolute_path):
yield self.join_path(entry, sub_entry[0]), sub_entry[1], sub_entry[2]
yield entry.name, entry.path, printer_profile_id
elif os.path.isdir(entry.path):
for sub_entry in self._analysis_backlog_generator(entry.path):
yield self.join_path(entry.name, sub_entry[0]), sub_entry[1], sub_entry[2]
def last_modified(self, path=None, recursive=False):
    """Return the newest mtime under ``path`` (relative to the storage root;
    defaults to the root itself), folding in each folder's ``.metadata.yaml``."""
    if path is None:
        target = self.basefolder
    else:
        target = os.path.join(self.basefolder, path)

    def newest_mtime(folder):
        # a folder's effective modification date also reflects its metadata file
        mtime = os.stat(folder).st_mtime
        metadata_file = os.path.join(folder, ".metadata.yaml")
        if os.path.exists(metadata_file):
            mtime = max(mtime, os.stat(metadata_file).st_mtime)
        return mtime

    if not recursive:
        return newest_mtime(target)
    return max(newest_mtime(root) for root, _, _ in walk(target))
def file_in_path(self, path, filepath):
filepath = self.sanitize_path(filepath)
@ -517,10 +553,14 @@ class LocalFileStorage(StorageInterface):
if not os.path.exists(folder_path):
return
contents = os.listdir(folder_path)
if ".metadata.yaml" in contents:
contents.remove(".metadata.yaml")
if contents and not recursive:
empty = True
for entry in scandir(folder_path):
if entry.name == ".metadata.yaml":
continue
empty = False
break
if not empty and not recursive:
raise StorageError("{name} in {path} is not empty".format(**locals()), code=StorageError.NOT_EMPTY)
import shutil
@ -1078,57 +1118,65 @@ class LocalFileStorage(StorageInterface):
metadata_dirty = False
result = dict()
for entry in os.listdir(path):
if is_hidden_path(entry):
for entry in scandir(path):
if is_hidden_path(entry.name):
# no hidden files and folders
continue
entry_path = os.path.join(path, entry)
entry_name = entry.name
entry_path = entry.path
entry_is_file = entry.is_file()
entry_is_dir = entry.is_dir()
entry_stat = entry.stat()
try:
entry, entry_path = self._sanitize_entry(entry, path, entry_path)
new_entry_name, new_entry_path = self._sanitize_entry(entry_name, path, entry_path)
if entry_name != new_entry_name or entry_path != new_entry_path:
entry_name = new_entry_name
entry_path = new_entry_path
entry_stat = os.stat(entry_path)
except:
# error while trying to rename the file, we'll continue here and ignore it
continue
path_in_location = entry if not base else base + entry
path_in_location = entry_name if not base else base + entry_name
# file handling
if os.path.isfile(entry_path):
type_path = octoprint.filemanager.get_file_type(entry)
if entry_is_file:
type_path = octoprint.filemanager.get_file_type(entry_name)
if not type_path:
# only supported extensions
continue
else:
file_type = type_path[0]
if entry in metadata and isinstance(metadata[entry], dict):
entry_data = metadata[entry]
if entry_name in metadata and isinstance(metadata[entry_name], dict):
entry_data = metadata[entry_name]
else:
entry_data = self._add_basic_metadata(path, entry, save=False, metadata=metadata)
entry_data = self._add_basic_metadata(path, entry_name, save=False, metadata=metadata)
metadata_dirty = True
# TODO extract model hash from source if possible to recreate link
if not entry_filter or entry_filter(entry, entry_data):
if not entry_filter or entry_filter(entry_name, entry_data):
# only add files passing the optional filter
extended_entry_data = dict()
extended_entry_data.update(entry_data)
extended_entry_data["name"] = entry
extended_entry_data["name"] = entry_name
extended_entry_data["path"] = path_in_location
extended_entry_data["type"] = file_type
extended_entry_data["typePath"] = type_path
stat = os.stat(entry_path)
stat = entry_stat
if stat:
extended_entry_data["size"] = stat.st_size
extended_entry_data["date"] = int(stat.st_mtime)
result[entry] = extended_entry_data
result[entry_name] = extended_entry_data
# folder recursion
elif os.path.isdir(entry_path):
elif entry_is_dir:
entry_data = dict(
name=entry,
name=entry_name,
path=path_in_location,
type="folder",
type_path=["folder"]
@ -1138,7 +1186,7 @@ class LocalFileStorage(StorageInterface):
recursive=recursive)
entry_data["children"] = sub_result
if not entry_filter or entry_filter(entry, entry_data):
if not entry_filter or entry_filter(entry_name, entry_data):
def get_size():
total_size = 0
for element in entry_data["children"].values():
@ -1153,7 +1201,7 @@ class LocalFileStorage(StorageInterface):
if recursive:
extended_entry_data["size"] = get_size()
result[entry] = extended_entry_data
result[entry_name] = extended_entry_data
# TODO recreate links if we have metadata less entries

View file

@ -35,6 +35,11 @@ import logging
import pkg_resources
import pkginfo
try:
from os import scandir
except ImportError:
from scandir import scandir
EntryPointOrigin = namedtuple("EntryPointOrigin", "type, entry_point, module_name, package_name, package_version")
FolderOrigin = namedtuple("FolderOrigin", "type, folder")
@ -535,13 +540,11 @@ class PluginManager(object):
self.logger.warn("Plugin folder {folder} could not be found, skipping it".format(folder=folder))
continue
entries = os.listdir(folder)
for entry in entries:
path = os.path.join(folder, entry)
if os.path.isdir(path) and os.path.isfile(os.path.join(path, "__init__.py")):
key = entry
elif os.path.isfile(path) and entry.endswith(".py"):
key = entry[:-3] # strip off the .py extension
for entry in scandir(folder):
if entry.is_dir() and os.path.isfile(os.path.join(entry.path, "__init__.py")):
key = entry.name
elif entry.is_file() and entry.name.endswith(".py"):
key = entry.name[:-3] # strip off the .py extension
else:
continue
@ -637,7 +640,7 @@ class PluginManager(object):
else:
return None
except:
self.logger.warn("Could not locate plugin {key}")
self.logger.warn("Could not locate plugin {key}".format(key=key))
return None
plugin = self._import_plugin(key, *module, name=name, version=version, summary=summary, author=author, url=url, license=license)

View file

@ -19,7 +19,7 @@ import feedparser
import flask
from octoprint.server import admin_permission
from octoprint.server.util.flask import restricted_access
from octoprint.server.util.flask import restricted_access, with_revalidation_checking, check_etag
from flask.ext.babel import gettext
class AnnouncementPlugin(octoprint.plugin.AssetPlugin,
@ -100,32 +100,54 @@ class AnnouncementPlugin(octoprint.plugin.AssetPlugin,
result = dict()
force = "force" in flask.request.values and flask.request.values["force"] in valid_boolean_trues
channel_data = self._fetch_all_channels(force=force)
channel_configs = self._get_channel_configs(force=force)
force = flask.request.values.get("force", "false") in valid_boolean_trues
enabled = self._settings.get(["enabled_channels"])
forced = self._settings.get(["forced_channels"])
for key, data in channel_configs.items():
read_until = channel_configs[key].get("read_until", None)
entries = sorted(self._to_internal_feed(channel_data.get(key, []), read_until=read_until), key=lambda e: e["published"], reverse=True)
unread = len(filter(lambda e: not e["read"], entries))
channel_configs = self._get_channel_configs(force=force)
if read_until is None and entries:
last = entries[0]["published"]
self._mark_read_until(key, last)
def view():
channel_data = self._fetch_all_channels(force=force)
result[key] = dict(channel=data["name"],
url=data["url"],
priority=data.get("priority", 2),
enabled=key in enabled or key in forced,
forced=key in forced,
data=entries,
unread=unread)
for key, data in channel_configs.items():
read_until = channel_configs[key].get("read_until", None)
entries = sorted(self._to_internal_feed(channel_data.get(key, []), read_until=read_until), key=lambda e: e["published"], reverse=True)
unread = len(filter(lambda e: not e["read"], entries))
return flask.jsonify(result)
if read_until is None and entries:
last = entries[0]["published"]
self._mark_read_until(key, last)
result[key] = dict(channel=data["name"],
url=data["url"],
priority=data.get("priority", 2),
enabled=key in enabled or key in forced,
forced=key in forced,
data=entries,
unread=unread)
return flask.jsonify(result)
def etag():
import hashlib
hash = hashlib.sha1()
hash.update(repr(sorted(enabled)))
hash.update(repr(sorted(forced)))
for channel in sorted(channel_configs.keys()):
hash.update(repr(channel_configs[channel]))
channel_data = self._get_channel_data_from_cache(channel, channel_configs[channel])
hash.update(repr(channel_data))
return hash.hexdigest()
def condition():
return check_etag(etag())
return with_revalidation_checking(etag_factory=lambda *args, **kwargs: etag(),
condition=lambda *args, **kwargs: condition(),
unless=lambda: force)(view)()
@octoprint.plugin.BlueprintPlugin.route("/channels/<channel>", methods=["POST"])
@restricted_access

View file

@ -188,8 +188,7 @@ $(function() {
};
self.requestData = function() {
OctoPrint.slicing.listProfilesForSlicer("cura")
.done(self.fromResponse);
self.slicingViewModel.requestData();
};
self.fromResponse = function(data) {
@ -208,13 +207,19 @@ $(function() {
self.onBeforeBinding = function () {
self.settings = self.settingsViewModel.settings;
self.requestData();
//self.requestData();
};
self.onSettingsHidden = function() {
self.resetPathTest();
};
self.onSlicingData = function(data) {
    // Forward Cura profile data to fromResponse, if present in the payload.
    var hasCuraProfiles = data && data.hasOwnProperty("cura") && data.cura.hasOwnProperty("profiles");
    if (!hasCuraProfiles) {
        return;
    }
    self.fromResponse(data.cura.profiles);
};
self.resetPathTest = function() {
self.pathBroken(false);
self.pathOk(false);

View file

@ -10,7 +10,7 @@ import octoprint.plugin
import octoprint.plugin.core
from octoprint.settings import valid_boolean_trues
from octoprint.server.util.flask import restricted_access
from octoprint.server.util.flask import restricted_access, with_revalidation_checking, check_etag
from octoprint.server import admin_permission, VERSION
from octoprint.util.pip import LocalPipCaller, UnknownPip
@ -176,26 +176,43 @@ class PluginManagerPlugin(octoprint.plugin.SimpleApiPlugin,
if not admin_permission.can():
return make_response("Insufficient rights", 403)
if "refresh_repository" in request.values and request.values["refresh_repository"] in valid_boolean_trues:
refresh_repository = request.values.get("refresh_repository", "false") in valid_boolean_trues
if refresh_repository:
self._repository_available = self._refresh_repository()
return jsonify(plugins=self._get_plugins(),
repository=dict(
available=self._repository_available,
plugins=self._repository_plugins
),
os=self._get_os(),
octoprint=self._get_octoprint_version_string(),
pip=dict(
available=self._pip_caller.available,
version=self._pip_caller.version_string,
install_dir=self._pip_caller.install_dir,
use_user=self._pip_caller.use_user,
virtual_env=self._pip_caller.virtual_env,
additional_args=self._settings.get(["pip_args"]),
python=sys.executable
def view():
return jsonify(plugins=self._get_plugins(),
repository=dict(
available=self._repository_available,
plugins=self._repository_plugins
),
os=self._get_os(),
octoprint=self._get_octoprint_version_string(),
pip=dict(
available=self._pip_caller.available,
version=self._pip_caller.version_string,
install_dir=self._pip_caller.install_dir,
use_user=self._pip_caller.use_user,
virtual_env=self._pip_caller.virtual_env,
additional_args=self._settings.get(["pip_args"]),
python=sys.executable
))
def etag():
import hashlib
hash = hashlib.sha1()
hash.update(repr(self._get_plugins()))
hash.update(str(self._repository_available))
hash.update(repr(self._repository_plugins))
return hash.hexdigest()
def condition():
return check_etag(etag())
return with_revalidation_checking(etag_factory=lambda *args, **kwargs: etag(),
condition=lambda *args, **kwargs: condition(),
unless=lambda: refresh_repository)(view)()
def on_api_command(self, command, data):
if not admin_permission.can():
return make_response("Insufficient rights", 403)

View file

@ -19,7 +19,7 @@ import hashlib
from . import version_checks, updaters, exceptions, util, cli
from octoprint.server.util.flask import restricted_access
from octoprint.server.util.flask import restricted_access, with_revalidation_checking, check_etag
from octoprint.server import admin_permission, VERSION, REVISION, BRANCH
from octoprint.util import dict_merge
import octoprint.settings
@ -411,17 +411,46 @@ class SoftwareUpdatePlugin(octoprint.plugin.BlueprintPlugin,
else:
check_targets = None
if "force" in flask.request.values and flask.request.values["force"] in octoprint.settings.valid_boolean_trues:
force = True
else:
force = False
force = flask.request.values.get("force", "false") in octoprint.settings.valid_boolean_trues
try:
information, update_available, update_possible = self.get_current_versions(check_targets=check_targets, force=force)
return flask.jsonify(dict(status="updatePossible" if update_available and update_possible else "updateAvailable" if update_available else "current",
information=information))
except exceptions.ConfigurationInvalid as e:
flask.make_response("Update not properly configured, can't proceed: %s" % e.message, 500)
def view():
try:
information, update_available, update_possible = self.get_current_versions(check_targets=check_targets, force=force)
return flask.jsonify(dict(status="updatePossible" if update_available and update_possible else "updateAvailable" if update_available else "current",
information=information))
except exceptions.ConfigurationInvalid as e:
return flask.make_response("Update not properly configured, can't proceed: %s" % e.message, 500)
def etag():
checks = self._get_configured_checks()
targets = check_targets
if targets is None:
targets = checks.keys()
import hashlib
hash = hashlib.sha1()
targets = sorted(targets)
for target in targets:
current_hash = self._get_check_hash(checks.get(target, dict()))
if target in self._version_cache and not force:
data = self._version_cache[target]
hash.update(current_hash)
hash.update(str(data["timestamp"] + self._version_cache_ttl >= time.time() > data["timestamp"]))
hash.update(repr(data["information"]))
hash.update(str(data["available"]))
hash.update(str(data["possible"]))
hash.update(",".join(targets))
return hash.hexdigest()
def condition():
return check_etag(etag())
return with_revalidation_checking(etag_factory=lambda *args, **kwargs: etag(),
condition=lambda *args, **kwargs: condition(),
unless=lambda: force)(view)()
@octoprint.plugin.BlueprintPlugin.route("/update", methods=["POST"])

View file

@ -12,25 +12,46 @@
var updateUrl = url + "update";
exports.checkEntries = function(entries, force, opts) {
if (arguments.length == 1 && _.isObject(arguments[0])) {
var params = arguments[0];
entries = params.entries;
force = params.force;
opts = params.opts;
}
entries = entries || [];
if (typeof entries == "string") {
entries = [entries];
}
var data = {
force: !!force
};
var data = {};
if (!!force) {
data.force = true;
}
if (entries && entries.length) {
data["check"] = entries.join(",")
data.check = entries.join(",");
}
return OctoPrint.getWithQuery(checkUrl, data, opts);
};
exports.check = function(force, opts) {
return exports.checkEntries([], force, opts);
if (arguments.length == 1 && _.isObject(arguments[0])) {
var params = arguments[0];
force = params.force;
opts = params.opts;
}
return exports.checkEntries({entries: [], force: force, opts: opts});
};
exports.update = function(entries, force, opts) {
if (arguments.length == 1 && _.isObject(arguments[0])) {
var params = arguments[0];
entries = params.entries;
force = params.force;
opts = params.opts;
}
entries = entries || [];
if (typeof entries == "string") {
entries = [entries];
@ -44,6 +65,12 @@
};
exports.updateAll = function(force, opts) {
if (arguments.length == 1 && _.isObject(arguments[0])) {
var params = arguments[0];
force = params.force;
opts = params.opts;
}
var data = {
force: !!force
};

View file

@ -11,6 +11,11 @@ import copy
import re
import logging
try:
from os import scandir
except ImportError:
from scandir import scandir
from octoprint.settings import settings
from octoprint.util import dict_merge, dict_sanitize, dict_contains_keys, is_hidden_path
@ -242,6 +247,12 @@ class PrinterProfileManager(object):
def profile_count(self):
return len(self._load_all_identifiers())
@property
def last_modified(self):
    """Newest modification date among the profile folder itself and every
    ``*.profile`` file it contains."""
    newest = os.stat(self._folder).st_mtime
    for entry in scandir(self._folder):
        if not entry.name.endswith(".profile"):
            continue
        newest = max(newest, entry.stat().st_mtime)
    return newest
def get_default(self):
default = settings().get(["printerProfiles", "default"])
if default is not None and self.exists(default):
@ -297,16 +308,15 @@ class PrinterProfileManager(object):
def _load_all_identifiers(self):
results = dict(_default=None)
for entry in os.listdir(self._folder):
if is_hidden_path(entry) or not entry.endswith(".profile") or entry == "_default.profile":
for entry in scandir(self._folder):
if is_hidden_path(entry.name) or not entry.name.endswith(".profile") or entry.name == "_default.profile":
continue
path = os.path.join(self._folder, entry)
if not os.path.isfile(path):
if not entry.is_file():
continue
identifier = entry[:-len(".profile")]
results[identifier] = path
identifier = entry.name[:-len(".profile")]
results[identifier] = entry.path
return results
def _load_from_path(self, path):

View file

@ -19,7 +19,7 @@ import octoprint.server
import octoprint.plugin
from octoprint.server import admin_permission, NO_CONTENT
from octoprint.settings import settings as s, valid_boolean_trues
from octoprint.server.util import noCachingResponseHandler, apiKeyRequestHandler, corsResponseHandler
from octoprint.server.util import noCachingExceptGetResponseHandler, apiKeyRequestHandler, corsResponseHandler
from octoprint.server.util.flask import restricted_access, get_json_command_from_request, passive_login
@ -43,7 +43,7 @@ from . import system as api_system
VERSION = "0.1"
api.after_request(noCachingResponseHandler)
api.after_request(noCachingExceptGetResponseHandler)
api.before_request(apiKeyRequestHandler)
api.after_request(corsResponseHandler)

View file

@ -10,7 +10,7 @@ from flask import request, jsonify, make_response, url_for
from octoprint.filemanager.destinations import FileDestinations
from octoprint.settings import settings, valid_boolean_trues
from octoprint.server import printer, fileManager, slicingManager, eventManager, NO_CONTENT
from octoprint.server.util.flask import restricted_access, get_json_command_from_request
from octoprint.server.util.flask import restricted_access, get_json_command_from_request, with_revalidation_checking
from octoprint.server.api import api
from octoprint.events import Events
import octoprint.filemanager
@ -19,15 +19,82 @@ import octoprint.filemanager.storage
import octoprint.slicing
import psutil
import hashlib
import logging
import threading
#~~ GCODE file handling
_file_cache = dict()
_file_cache_mutex = threading.RLock()
def _clear_file_cache():
    """Thread-safely drop every cached file listing."""
    with _file_cache_mutex:
        for cache_key in list(_file_cache.keys()):
            del _file_cache[cache_key]
def _create_lastmodified(path, recursive):
    """Determine the last-modified date for the storages addressed by ``path``.

    For ``.../files`` all registered storages are consulted; for
    ``.../files/local`` only the local storage. Returns ``None`` when any
    involved storage could not report a date (so callers skip caching).
    """
    logger = logging.getLogger(__name__)

    if path.endswith("/files"):
        # all storages involved
        lms = [0]
        for storage in fileManager.registered_storages:
            try:
                lms.append(fileManager.last_modified(storage, recursive=recursive))
            except Exception:
                # narrow from a bare except so SystemExit/KeyboardInterrupt propagate
                logger.exception("There was an error retrieving the last modified data from storage {}".format(storage))
                lms.append(None)

        # we return None if ANY of the involved storages returned None
        # (any(...) instead of truthiness of filter(...): a py3 filter object
        # is always truthy, which would have broken this check)
        if any(lm is None for lm in lms):
            return None

        # if we reach this point, we return the maximum of all dates
        return max(lms)

    elif path.endswith("/files/local"):
        # only local storage involved
        try:
            return fileManager.last_modified(FileDestinations.LOCAL, recursive=recursive)
        except Exception:
            logger.exception("There was an error retrieving the last modified data from storage {}".format(FileDestinations.LOCAL))
            return None

    else:
        return None
def _create_etag(path, recursive, lm=None):
    """Build an ETag for a file-list request from its last-modified date.

    Returns ``None`` when no last-modified date could be determined, which
    disables revalidation for the request.
    """
    if lm is None:
        lm = _create_lastmodified(path, recursive)
    if lm is None:
        return None

    # use a local name that does not shadow the builtin ``hash``
    checksum = hashlib.sha1()
    checksum.update(str(lm))
    checksum.update(str(recursive))

    if path.endswith("/files") or path.endswith("/files/sdcard"):
        # include sd data in etag
        checksum.update(repr(sorted(printer.get_sd_files(), key=lambda x: x[0])))

    return checksum.hexdigest()
@api.route("/files", methods=["GET"])
@with_revalidation_checking(etag_factory=lambda lm=None: _create_etag(request.path,
request.values.get("recursive", False),
lm=lm),
lastmodified_factory=lambda: _create_lastmodified(request.path,
request.values.get("recursive", False)),
unless=lambda: request.values.get("force", False) or request.values.get("_refresh", False))
def readGcodeFiles():
filter = "filter" in request.values and request.values["recursive"] in valid_boolean_trues
recursive = "recursive" in request.values and request.values["recursive"] in valid_boolean_trues
filter = request.values.get("filter", "false") in valid_boolean_trues
recursive = request.values.get("recursive", "false") in valid_boolean_trues
force = request.values.get("force", "false") in valid_boolean_trues
if force:
_clear_file_cache()
files = _getFileList(FileDestinations.LOCAL, filter=filter, recursive=recursive)
files.extend(_getFileList(FileDestinations.SDCARD))
@ -37,13 +104,25 @@ def readGcodeFiles():
@api.route("/files/<string:origin>", methods=["GET"])
@with_revalidation_checking(etag_factory=lambda lm=None: _create_etag(request.path,
request.values.get("recursive", False),
lm=lm),
lastmodified_factory=lambda: _create_lastmodified(request.path,
request.values.get("recursive", False)),
unless=lambda: request.values.get("force", False) or request.values.get("_refresh", False))
def readGcodeFilesForOrigin(origin):
if origin not in [FileDestinations.LOCAL, FileDestinations.SDCARD]:
return make_response("Unknown origin: %s" % origin, 404)
recursive = False
if "recursive" in request.values:
recursive = request.values["recursive"] in valid_boolean_trues
recursive = request.values.get("recursive", "false") in valid_boolean_trues
force = request.values.get("force", "false") in valid_boolean_trues
if force:
with _file_cache_mutex:
try:
del _file_cache[origin]
except KeyError:
pass
files = _getFileList(origin, recursive=recursive)
@ -89,24 +168,29 @@ def _getFileList(origin, path=None, filter=None, recursive=False):
if filter:
filter_func = lambda entry, entry_data: octoprint.filemanager.valid_file_type(entry, type=filter)
files = fileManager.list_files(origin, path=path, filter=filter_func, recursive=recursive)[origin].values()
with _file_cache_mutex:
files, lastmodified = _file_cache.get("{}:{}:{}:{}".format(origin, path, recursive, filter), ([], None))
if lastmodified is None or lastmodified < fileManager.last_modified(origin, path=path, recursive=recursive):
files = fileManager.list_files(origin, path=path, filter=filter_func, recursive=recursive)[origin].values()
lastmodified = fileManager.last_modified(origin, path=path, recursive=recursive)
_file_cache["{}:{}:{}:{}".format(origin, path, recursive, filter)] = (files, lastmodified)
def analyse_recursively(files, path=None):
if path is None:
path = ""
result = []
for file_or_folder in files:
# make a shallow copy in order to not accidentally modify the cached data
file_or_folder = dict(file_or_folder)
file_or_folder["origin"] = FileDestinations.LOCAL
if file_or_folder["type"] == "folder":
if "children" in file_or_folder:
file_or_folder["children"] = analyse_recursively(file_or_folder["children"].values(), path + file_or_folder["name"] + "/")
file_or_folder.update({
"refs": {
"resource": url_for(".readGcodeFile", target=FileDestinations.LOCAL, filename=path + file_or_folder["name"], _external=True)
}
})
file_or_folder["refs"] = dict(resource=url_for(".readGcodeFile", target=FileDestinations.LOCAL, filename=path + file_or_folder["name"], _external=True))
else:
if "analysis" in file_or_folder and octoprint.filemanager.valid_file_type(file_or_folder["name"], type="gcode"):
file_or_folder["gcodeAnalysis"] = file_or_folder["analysis"]
@ -137,16 +221,14 @@ def _getFileList(origin, path=None, filter=None, recursive=False):
prints["last"]["printTime"] = last["printTime"]
file_or_folder["prints"] = prints
file_or_folder.update({
"refs": {
"resource": url_for(".readGcodeFile", target=FileDestinations.LOCAL, filename=file_or_folder["path"], _external=True),
"download": url_for("index", _external=True) + "downloads/files/" + FileDestinations.LOCAL + "/" + file_or_folder["path"]
}
})
file_or_folder["refs"] = dict(resource=url_for(".readGcodeFile", target=FileDestinations.LOCAL, filename=file_or_folder["path"], _external=True),
download=url_for("index", _external=True) + "downloads/files/" + FileDestinations.LOCAL + "/" + file_or_folder["path"])
return files
result.append(file_or_folder)
analyse_recursively(files)
return result
files = analyse_recursively(files)
return files

View file

@ -9,6 +9,11 @@ import os
import tarfile
import zipfile
try:
from os import scandir
except ImportError:
from scandir import scandir
from collections import defaultdict
from flask import request, jsonify, make_response
@ -33,10 +38,8 @@ def getInstalledLanguagePacks():
core_packs = []
plugin_packs = defaultdict(lambda: dict(identifier=None, display=None, languages=[]))
for folder in os.listdir(translation_folder):
path = os.path.join(translation_folder, folder)
if not os.path.isdir(path):
for entry in scandir(translation_folder):
if not entry.is_dir():
continue
def load_meta(path, locale):
@ -61,24 +64,23 @@ def getInstalledLanguagePacks():
meta["locale_english"] = l.english_name
return meta
if folder == "_plugins":
for plugin_folder in os.listdir(path):
plugin_path = os.path.join(path, plugin_folder)
if not os.path.isdir(plugin_path):
if entry.name == "_plugins":
for plugin_entry in scandir(entry.path):
if not plugin_entry.is_dir():
continue
if not plugin_folder in plugin_manager().plugins:
if not plugin_entry.name in plugin_manager().plugins:
continue
plugin_info = plugin_manager().plugins[plugin_folder]
plugin_info = plugin_manager().plugins[plugin_entry.name]
plugin_packs[plugin_folder]["identifier"] = plugin_folder
plugin_packs[plugin_folder]["display"] = plugin_info.name
plugin_packs[plugin_entry.name]["identifier"] = plugin_entry.name
plugin_packs[plugin_entry.name]["display"] = plugin_info.name
for language_folder in os.listdir(plugin_path):
plugin_packs[plugin_folder]["languages"].append(load_meta(os.path.join(plugin_path, language_folder), language_folder))
for language_entry in scandir(plugin_entry.path):
plugin_packs[plugin_entry.name]["languages"].append(load_meta(language_entry.path, language_entry.name))
else:
core_packs.append(load_meta(os.path.join(translation_folder, folder), folder))
core_packs.append(load_meta(entry.path, entry.name))
result = dict(_core=dict(identifier="_core", display="Core", languages=core_packs))
result.update(plugin_packs)

View file

@ -5,7 +5,12 @@ __author__ = "Marc Hannappel Salandora"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import os
try:
from os import scandir
except ImportError:
from scandir import scandir
from flask import request, jsonify, url_for, make_response
from werkzeug.utils import secure_filename
@ -52,15 +57,14 @@ def deleteLog(filename):
def _getLogFiles():
files = []
basedir = settings().getBaseFolder("logs")
for osFile in os.listdir(basedir):
statResult = os.stat(os.path.join(basedir, osFile))
for entry in scandir(basedir):
files.append({
"name": osFile,
"date": int(statResult.st_mtime),
"size": statResult.st_size,
"name": entry.name,
"date": int(entry.stat().st_mtime),
"size": entry.stat().st_size,
"refs": {
"resource": url_for(".downloadLog", filename=osFile, _external=True),
"download": url_for("index", _external=True) + "downloads/logs/" + osFile
"resource": url_for(".downloadLog", filename=entry.name, _external=True),
"download": url_for("index", _external=True) + "downloads/logs/" + entry.name
}
})

View file

@ -11,15 +11,33 @@ import copy
from flask import jsonify, make_response, request, url_for
from werkzeug.exceptions import BadRequest
from octoprint.server.api import api, NO_CONTENT
from octoprint.server.util.flask import restricted_access
from octoprint.server.api import api, NO_CONTENT, valid_boolean_trues
from octoprint.server.util.flask import restricted_access, with_revalidation_checking
from octoprint.util import dict_merge
from octoprint.server import printerProfileManager
from octoprint.printer.profile import InvalidProfileError, CouldNotOverwriteError, SaveError
def _lastmodified():
    # Last modification date of the printer profiles, as reported by the
    # profile manager's ``last_modified`` property.
    return printerProfileManager.last_modified
def _etag(lm=None):
    """Compute an ETag over the profiles' last-modified date and the current
    default profile, so changing the default also invalidates caches."""
    if lm is None:
        lm = _lastmodified()

    import hashlib
    # local name chosen so the builtin ``hash`` is not shadowed
    checksum = hashlib.sha1()
    checksum.update(str(lm))
    checksum.update(repr(printerProfileManager.get_default()))
    return checksum.hexdigest()
@api.route("/printerprofiles", methods=["GET"])
@with_revalidation_checking(etag_factory=_etag,
                            lastmodified_factory=_lastmodified,
                            unless=lambda: request.values.get("force", "false") in valid_boolean_trues)
def printerProfilesList():
    # Serve the full list of printer profiles; conditional 304 handling is done
    # by the decorator unless the client sends ?force=true.
    all_profiles = printerProfileManager.get_all()
    return jsonify(dict(profiles=_convert_profiles(all_profiles)))

View file

@ -11,19 +11,36 @@ from flask import request, jsonify, make_response
from werkzeug.exceptions import BadRequest
from octoprint.events import eventManager, Events
from octoprint.settings import settings
from octoprint.settings import settings, valid_boolean_trues
from octoprint.server import admin_permission, printer
from octoprint.server.api import api
from octoprint.server.util.flask import restricted_access
from octoprint.server.util.flask import restricted_access, with_revalidation_checking
import octoprint.plugin
import octoprint.util
#~~ settings
def _lastmodified():
    # Last modification stamp of the settings (includes in-memory dirty time).
    return settings().last_modified
def _etag(lm=None):
    """
    Compute a SHA1 based ETag over the settings' last modification stamp and
    the printer's current connection options.

    If ``lm`` is not given, the current last modification stamp is fetched.
    """
    import hashlib

    if lm is None:
        lm = _lastmodified()

    options = printer.__class__.get_connection_options()

    digest = hashlib.sha1()
    digest.update(str(lm))
    digest.update(repr(options))
    return digest.hexdigest()
@api.route("/settings", methods=["GET"])
@with_revalidation_checking(etag_factory=_etag,
lastmodified_factory=_lastmodified,
unless=lambda: request.values.get("force", "false") in valid_boolean_trues)
def getSettings():
logger = logging.getLogger(__name__)

View file

@ -9,7 +9,7 @@ from flask import request, jsonify, make_response, url_for
from werkzeug.exceptions import BadRequest
from octoprint.server import slicingManager
from octoprint.server.util.flask import restricted_access
from octoprint.server.util.flask import restricted_access, with_revalidation_checking
from octoprint.server.api import api, NO_CONTENT
from octoprint.settings import settings as s, valid_boolean_trues
@ -17,7 +17,39 @@ from octoprint.settings import settings as s, valid_boolean_trues
from octoprint.slicing import UnknownSlicer, SlicerNotConfigured, ProfileAlreadyExists, UnknownProfile, CouldNotDeleteProfile
def _lastmodified(configured):
    """
    Newest profile modification stamp across all relevant slicers.

    ``configured`` selects between only the configured slicers and all
    registered slicers. Returns 0 if there are no slicers at all.
    """
    if configured:
        slicers = slicingManager.configured_slicers
    else:
        slicers = slicingManager.registered_slicers

    return max([0] + [slicingManager.profiles_last_modified(slicer) for slicer in slicers])
def _etag(configured, lm=None):
    """
    Compute a SHA1 based ETag over the slicers' profile last modification stamp
    and the sorted set of (configured or registered) slicer ids.
    """
    import hashlib

    if lm is None:
        lm = _lastmodified(configured)

    if configured:
        slicers = slicingManager.configured_slicers
    else:
        slicers = slicingManager.registered_slicers

    digest = hashlib.sha1()
    digest.update(str(lm))
    digest.update(repr(sorted(slicers)))
    return digest.hexdigest()
@api.route("/slicing", methods=["GET"])
@with_revalidation_checking(etag_factory=lambda lm=None: _etag(request.values.get("configured", "false") in valid_boolean_trues, lm=lm),
lastmodified_factory=lambda: _lastmodified(request.values.get("configured", "false") in valid_boolean_trues),
unless=lambda: request.values.get("force", "false") in valid_boolean_trues)
def slicingListAll():
from octoprint.filemanager import get_extensions

View file

@ -6,6 +6,7 @@ __license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agp
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import os
import threading
from flask import request, jsonify, url_for, make_response
from werkzeug.utils import secure_filename
@ -15,7 +16,7 @@ import octoprint.util as util
from octoprint.settings import settings, valid_boolean_trues
from octoprint.server import admin_permission, printer
from octoprint.server.util.flask import redirect_to_tornado, restricted_access, get_json_command_from_request
from octoprint.server.util.flask import redirect_to_tornado, restricted_access, get_json_command_from_request, with_revalidation_checking
from octoprint.server.api import api
from octoprint.server import NO_CONTENT
@ -23,8 +24,36 @@ from octoprint.server import NO_CONTENT
#~~ timelapse handling
# Process-local response caches for the timelapse listing, guarded by a mutex.
# Entries are invalidated by comparing the cached folder mtime against the
# current one before reuse (see getTimelapseData).
_timelapse_cache_finished = []
_timelapse_cache_finished_lastmodified = None
_timelapse_cache_unrendered = []
_timelapse_cache_unrendered_lastmodified = None
_timelapse_cache_mutex = threading.RLock()
def _lastmodified(unrendered):
    """
    Last modification stamp of the finished timelapse folder, or - if
    ``unrendered`` is True - the newer of the finished and unrendered folders.
    Returns None if either stamp is unavailable in the combined case.
    """
    lm_finished = octoprint.timelapse.last_modified_finished()
    if not unrendered:
        return lm_finished

    lm_unrendered = octoprint.timelapse.last_modified_unrendered()
    if lm_finished is None or lm_unrendered is None:
        return None
    return max(lm_finished, lm_unrendered)
def _etag(unrendered, lm=None):
    """
    Compute a SHA1 based ETag derived from the timelapse folders' last
    modification stamp.
    """
    import hashlib

    if lm is None:
        lm = _lastmodified(unrendered)

    digest = hashlib.sha1()
    digest.update(str(lm))
    return digest.hexdigest()
@api.route("/timelapse", methods=["GET"])
@with_revalidation_checking(etag_factory=lambda lm=None: _etag(request.values.get("unrendered", "false") in valid_boolean_trues, lm=lm),
lastmodified_factory=lambda: _lastmodified(request.values.get("unrendered", "false") in valid_boolean_trues),
unless=lambda: request.values.get("force", "false") in valid_boolean_trues)
def getTimelapseData():
timelapse = octoprint.timelapse.current
@ -41,15 +70,41 @@ def getTimelapseData():
else:
config = dict(type="off")
files = octoprint.timelapse.get_finished_timelapses()
force = request.values.get("force", "false") in valid_boolean_trues
unrendered = request.values.get("unrendered", "false") in valid_boolean_trues
global _timelapse_cache_finished_lastmodified, _timelapse_cache_finished, _timelapse_cache_unrendered_lastmodified, _timelapse_cache_unrendered
with _timelapse_cache_mutex:
current_lastmodified_finished = octoprint.timelapse.last_modified_finished()
current_lastmodified_unrendered = octoprint.timelapse.last_modified_unrendered()
if not force and _timelapse_cache_finished_lastmodified == current_lastmodified_finished:
files = _timelapse_cache_finished
else:
files = octoprint.timelapse.get_finished_timelapses()
_timelapse_cache_finished = files
_timelapse_cache_finished_lastmodified = current_lastmodified_finished
unrendered_files = []
if unrendered:
if not force and _timelapse_cache_unrendered_lastmodified == current_lastmodified_unrendered:
unrendered_files = _timelapse_cache_unrendered
else:
unrendered_files = octoprint.timelapse.get_unrendered_timelapses()
_timelapse_cache_unrendered = unrendered_files
_timelapse_cache_unrendered_lastmodified = current_lastmodified_unrendered
finished_list = []
for f in files:
f["url"] = url_for("index") + "downloads/timelapse/" + f["name"]
output = dict(f)
output["url"] = url_for("index") + "downloads/timelapse/" + f["name"]
finished_list.append(output)
result = dict(config=config,
files=files)
files=finished_list)
if "unrendered" in request.values and request.values["unrendered"] in valid_boolean_trues:
result.update(unrendered=octoprint.timelapse.get_unrendered_timelapses())
if unrendered:
result.update(unrendered=unrendered_files)
return jsonify(result)

View file

@ -92,6 +92,20 @@ def noCachingResponseHandler(resp):
return flask.add_non_caching_response_headers(resp)
def noCachingExceptGetResponseHandler(resp):
    """
    ``after_request`` handler for blueprints which shall set no caching headers
    on their responses to any requests that are not sent with method ``GET``.

    GET responses instead get ``max-age=0`` headers so that conditional
    revalidation (ETag / Last-Modified) still works.

    See :func:`noCachingResponseHandler`.
    """
    if _flask.request.method != "GET":
        return flask.add_non_caching_response_headers(resp)
    return flask.add_no_max_age_response_headers(resp)
def optionsAllowOrigin(request):
"""
Shortcut for request handling for CORS OPTIONS requests to set CORS headers.

View file

@ -29,6 +29,10 @@ import octoprint.plugin
from werkzeug.contrib.cache import BaseCache
try:
from os import scandir, walk
except ImportError:
from scandir import scandir, walk
#~~ monkey patching
@ -52,12 +56,12 @@ def enable_additional_translations(default_locale="en", additional_folders=None)
if not os.path.isdir(dirname):
return []
result = []
for folder in os.listdir(dirname):
locale_dir = os.path.join(dirname, folder, 'LC_MESSAGES')
for entry in scandir(dirname):
locale_dir = os.path.join(entry.path, 'LC_MESSAGES')
if not os.path.isdir(locale_dir):
continue
if filter(lambda x: x.endswith('.mo'), os.listdir(locale_dir)):
result.append(Locale.parse(folder))
if filter(lambda x: x.name.endswith('.mo'), scandir(locale_dir)):
result.append(Locale.parse(entry.name))
if not result:
result.append(Locale.parse(self._default_locale))
return result
@ -832,6 +836,61 @@ def conditional(condition, met):
return decorator
def with_revalidation_checking(etag_factory=None,
                               lastmodified_factory=None,
                               condition=None,
                               unless=None):
    """
    Decorator adding HTTP revalidation (ETag / Last-Modified) support to a view.

    ``lastmodified_factory`` returns the resource's current last modification
    value, ``etag_factory`` (called with that value) returns the ETag to
    advertise, ``condition`` decides whether a ``304 Not Modified`` may be
    returned and ``unless`` can veto the 304 shortcut (e.g. on ``?force=true``).
    All arguments default to no-ops / the standard ETag+Last-Modified check.
    """
    if etag_factory is None:
        # default: no ETag available
        def etag_factory(lm=None):
            return None

    if lastmodified_factory is None:
        # default: no last modification stamp available
        def lastmodified_factory():
            return None

    if condition is None:
        # default: 304 only if BOTH the Last-Modified and the ETag check pass
        def condition(lm=None, etag=None):
            if lm is None:
                lm = lastmodified_factory()
            if etag is None:
                etag = etag_factory(lm=lm)
            return check_lastmodified(lm) and check_etag(etag)

    if unless is None:
        # default: never veto the 304 shortcut
        def unless():
            return False

    def decorator(f):
        @functools.wraps(f)
        def decorated_function(*args, **kwargs):
            lm = lastmodified_factory()
            etag = etag_factory(lm)

            if condition(lm, etag) and not unless():
                return make_response("Not Modified", 304)

            # generate response
            response = f(*args, **kwargs)

            # set etag header if not already set
            if etag and response.get_etag()[0] is None:
                response.set_etag(etag)

            # set last modified header if not already set
            if lm and response.headers.get("Last-Modified", None) is None:
                if not isinstance(lm, basestring):
                    # convert numeric/datetime stamps to an HTTP date string
                    from werkzeug.http import http_date
                    lm = http_date(lm)
                response.headers["Last-Modified"] = lm

            # force revalidation on every request, but allow conditional caching
            response = add_no_max_age_response_headers(response)
            return response
        return decorated_function
    return decorator
def check_etag(etag):
return flask.request.method in ("GET", "HEAD") and \
flask.request.if_none_match is not None and \
@ -839,6 +898,10 @@ def check_etag(etag):
def check_lastmodified(lastmodified):
    """
    Check whether the request's ``If-Modified-Since`` header allows a 304.

    Returns True if this is a GET/HEAD request carrying an
    ``If-Modified-Since`` header and the resource has NOT been modified since
    then, i.e. the view may answer with ``304 Not Modified``.
    """
    # accept plain timestamps too (st_mtime and friends may be int or float)
    if isinstance(lastmodified, (int, float)):
        from datetime import datetime
        # NOTE(review): fromtimestamp uses the server's local timezone while
        # werkzeug parses If-Modified-Since as naive UTC - confirm hosts run
        # UTC or switch to utcfromtimestamp
        lastmodified = datetime.fromtimestamp(lastmodified).replace(microsecond=0)
    # RFC 7232: "not modified" means the last modification happened AT OR
    # BEFORE the client's stamp - the original ">=" reported 304 for resources
    # modified AFTER the client's copy, serving stale content
    return flask.request.method in ("GET", "HEAD") and \
           flask.request.if_modified_since is not None and \
           lastmodified <= flask.request.if_modified_since
@ -851,6 +914,11 @@ def add_non_caching_response_headers(response):
return response
def add_no_max_age_response_headers(response):
    # max-age=0 forces clients to revalidate on every request while still
    # allowing conditional caching via ETag / Last-Modified
    response.headers["Cache-Control"] = "max-age=0"
    return response
#~~ access validators for use with tornado

View file

@ -31,6 +31,8 @@ import logging
import re
import uuid
import copy
import time
from builtins import bytes
try:
@ -531,6 +533,7 @@ class Settings(object):
self._config = None
self._dirty = False
self._dirty_time = 0
self._mtime = None
self._get_preprocessors = dict(
@ -771,6 +774,10 @@ class Settings(object):
stat = os.stat(self._configfile)
return stat.st_mtime
@property
def last_modified_or_made_dirty(self):
return max(self.last_modified, self._dirty_time)
#~~ load and save
def load(self, migrate=False):
@ -1274,6 +1281,7 @@ class Settings(object):
try:
chain.del_by_path(path)
self._dirty = True
self._dirty_time = time.time()
except KeyError:
if error_on_path:
raise NoSuchSettingsPath()
@ -1325,6 +1333,7 @@ class Settings(object):
try:
chain.del_by_path(path)
self._dirty = True
self._dirty_time = time.time()
except KeyError:
if error_on_path:
raise NoSuchSettingsPath()
@ -1335,6 +1344,7 @@ class Settings(object):
else:
chain.set_by_path(path, value)
self._dirty = True
self._dirty_time = time.time()
def setInt(self, path, value, **kwargs):
if value is None:
@ -1381,11 +1391,13 @@ class Settings(object):
if not self._config["folder"]:
del self._config["folder"]
self._dirty = True
self._dirty_time = time.time()
elif (path != currentPath and path != defaultPath) or force:
if not "folder" in self._config.keys():
self._config["folder"] = {}
self._config["folder"][type] = path
self._dirty = True
self._dirty_time = time.time()
def saveScript(self, script_type, name, script):
script_folder = self.getBaseFolder("scripts")

View file

@ -20,6 +20,12 @@ __copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms
import os
try:
from os import scandir
except ImportError:
from scandir import scandir
import octoprint.plugin
import octoprint.events
import octoprint.util
@ -543,17 +549,34 @@ class SlicingManager(object):
profiles = dict()
slicer_profile_path = self.get_slicer_profile_path(slicer)
for entry in os.listdir(slicer_profile_path):
if not entry.endswith(".profile") or octoprint.util.is_hidden_path(entry):
for entry in scandir(slicer_profile_path):
if not entry.name.endswith(".profile") or octoprint.util.is_hidden_path(entry.name):
# we are only interested in profiles and no hidden files
continue
path = os.path.join(slicer_profile_path, entry)
profile_name = entry[:-len(".profile")]
profiles[profile_name] = self._load_profile_from_path(slicer, path, require_configured=require_configured)
profile_name = entry.name[:-len(".profile")]
profiles[profile_name] = self._load_profile_from_path(slicer, entry.path, require_configured=require_configured)
return profiles
def profiles_last_modified(self, slicer):
    """
    Retrieves the last modification date of ``slicer``'s profiles.

    Args:
        slicer (str): the slicer for which to retrieve the last modification date

    Returns:
        (float) the time stamp of the last modification of the slicer's profiles

    Raises:
        UnknownSlicer: if ``slicer`` is not a registered slicer
    """
    if not slicer in self.registered_slicers:
        raise UnknownSlicer(slicer)

    slicer_profile_path = self.get_slicer_profile_path(slicer)
    # folder mtime catches added/removed profiles even if no profile file changed
    lms = [os.stat(slicer_profile_path).st_mtime]
    # entry.stat() reuses the data scandir already fetched where the platform
    # provides it, avoiding a second stat syscall per profile
    lms += [entry.stat().st_mtime for entry in scandir(slicer_profile_path) if entry.name.endswith(".profile")]
    return max(lms)
def get_slicer_profile_path(self, slicer):
"""
Retrieves the path where the profiles for slicer ``slicer`` are stored.

View file

@ -50,9 +50,19 @@
OctoPrint.files = {
get: getEntry,
list: function (recursively, opts) {
list: function (recursively, force, opts) {
recursively = recursively || false;
return OctoPrint.getWithQuery(url, {recursive: recursively}, opts)
force = force || false;
var query = {};
if (recursively) {
query.recursive = recursively;
}
if (force) {
query.force = force;
}
return OctoPrint.getWithQuery(url, query, opts)
.done(preProcessList);
},

View file

@ -95,9 +95,15 @@ $(function() {
// work around a stupid iOS6 bug where ajax requests get cached and only work once, as described at
// http://stackoverflow.com/questions/12506897/is-safari-on-ios-6-caching-ajax-results
$.ajaxSetup({
type: 'POST',
headers: { "cache-control": "no-cache" }
$.ajaxPrefilter(function(options, originalOptions, jqXHR) {
    // Only non-GET requests get a "Cache-Control: no-cache" header; GETs stay
    // cacheable so server-side ETag/Last-Modified revalidation can work.
    // (Removed a dead "var headers;" declaration that was never used.)
    if (options.type != "GET") {
        if (options.hasOwnProperty("headers")) {
            options.headers["Cache-Control"] = "no-cache";
        } else {
            options.headers = { "Cache-Control": "no-cache" };
        }
    }
});
// send the current UI API key with any request
@ -570,7 +576,20 @@ $(function() {
callViewModels(allViewModels, "onStartup");
viewModelMap["settingsViewModel"].requestData()
.done(bindViewModels);
.done(function() {
// There appears to be an odd race condition either in JQuery's AJAX implementation or
// the browser's implementation of XHR, causing a second GET request from inside the
// completion handler of the very same request to never get its completion handler called
// if ETag headers are present on the response (the status code of the request does NOT
// seem to matter here, only that the ETag header is present).
//
// Minimal example with which I was able to reproduce this behaviour can be found
// at https://gist.github.com/foosel/b2ddb9ebd71b0b63a749444651bfce3f
//
// Decoupling all consecutive calls from this done event handler hence is an easy way
// to avoid this problem. A zero timeout should do the trick nicely.
window.setTimeout(bindViewModels, 0);
});
});
}
);

View file

@ -203,11 +203,12 @@ $(function() {
self._focus = undefined;
self._switchToPath = undefined;
self.requestData = function(params) {
var focus, switchToPath;
var focus, switchToPath, force;
if (_.isObject(params)) {
focus = params.focus;
switchToPath = params.switchToPath;
force = params.force
} else if (arguments.length) {
// old argument list type call signature
log.warn("FilesViewModel.requestData called with old argument list. That is deprecated, please use parameter object instead.");
@ -221,6 +222,9 @@ $(function() {
if (arguments.length >= 3) {
switchToPath = arguments[2];
}
if (arguments.length >= 4) {
force = arguments[3];
}
}
self._focus = self._focus || focus;
@ -230,7 +234,7 @@ $(function() {
return self._otherRequestInProgress
}
return self._otherRequestInProgress = OctoPrint.files.list(true)
return self._otherRequestInProgress = OctoPrint.files.list(true, force)
.done(function(response) {
self.fromResponse(response, {focus: self._focus, switchToPath: self._switchToPath});
})

View file

@ -53,10 +53,8 @@ $(function() {
.done(self.requestData);
};
self.onUserLoggedIn = function(user) {
if (user.admin) {
self.requestData();
}
self.onSettingsShown = function() {
self.requestData();
};
}

View file

@ -460,6 +460,9 @@ $(function() {
// perform the request
self.receiving(true);
return OctoPrint.settings.get()
.always(function() {
self.receiving(false);
})
.done(function(response) {
self.fromResponse(response, local);
@ -483,9 +486,6 @@ $(function() {
deferred.reject(args);
});
self.outstanding = [];
})
.always(function() {
self.receiving(false);
});
};

View file

@ -23,6 +23,8 @@ $(function() {
self.profiles = ko.observableArray();
self.printerProfile = ko.observable();
self.allViewModels = undefined;
self.slicersForFile = function(file) {
if (file === undefined) {
return [];
@ -208,6 +210,10 @@ $(function() {
});
self.defaultSlicer = selectedSlicer;
if (self.allViewModels) {
callViewModels(self.allViewModels, "onSlicingData", [data]);
}
};
self.slice = function() {
@ -260,6 +266,10 @@ $(function() {
self.onEventSettingsUpdated = function(payload) {
self.requestData();
};
self.onAllBound = function(allViewModels) {
self.allViewModels = allViewModels;
};
}
OCTOPRINT_VIEWMODELS.push([

View file

@ -29,7 +29,7 @@
</div>
<div class="refresh-trigger accordion-heading-button btn-group">
<a href="#" data-bind="click: function() { $root.requestData(); }" title="{{ _('Refresh file list') }}">
<a href="#" data-bind="click: function() { $root.requestData({force: true}); }" title="{{ _('Refresh file list') }}">
<span class="icon-refresh"></span>
</a>
</div>

View file

@ -27,6 +27,12 @@ import collections
import re
try:
from os import scandir, walk
except ImportError:
from scandir import scandir, walk
# currently configured timelapse
current = None
@ -67,18 +73,25 @@ def _extract_prefix(filename):
return filename[:pos]
def last_modified_finished():
    # mtime of the timelapse folder changes whenever a rendered timelapse is
    # added or removed, so it serves as a cheap cache invalidation stamp
    return os.stat(settings().getBaseFolder("timelapse")).st_mtime
def last_modified_unrendered():
    # same as last_modified_finished, but for the unrendered capture folder
    return os.stat(settings().getBaseFolder("timelapse_tmp")).st_mtime
def get_finished_timelapses():
files = []
basedir = settings().getBaseFolder("timelapse")
for osFile in os.listdir(basedir):
if not fnmatch.fnmatch(osFile, "*.mp[g4]"):
for entry in scandir(basedir):
if not fnmatch.fnmatch(entry.name, "*.mp[g4]"):
continue
statResult = os.stat(os.path.join(basedir, osFile))
files.append({
"name": osFile,
"size": util.get_formatted_size(statResult.st_size),
"bytes": statResult.st_size,
"date": util.get_formatted_datetime(datetime.datetime.fromtimestamp(statResult.st_ctime))
"name": entry.name,
"size": util.get_formatted_size(entry.stat().st_size),
"bytes": entry.stat().st_size,
"date": util.get_formatted_datetime(datetime.datetime.fromtimestamp(entry.stat().st_ctime))
})
return files
@ -92,19 +105,18 @@ def get_unrendered_timelapses():
basedir = settings().getBaseFolder("timelapse_tmp")
jobs = collections.defaultdict(lambda: dict(count=0, size=None, bytes=0, date=None, timestamp=None))
for osFile in os.listdir(basedir):
if not fnmatch.fnmatch(osFile, "*.jpg"):
for entry in scandir(basedir):
if not fnmatch.fnmatch(entry.name, "*.jpg"):
continue
prefix = _extract_prefix(osFile)
prefix = _extract_prefix(entry.name)
if prefix is None:
continue
statResult = os.stat(os.path.join(basedir, osFile))
jobs[prefix]["count"] += 1
jobs[prefix]["bytes"] += statResult.st_size
if jobs[prefix]["timestamp"] is None or statResult.st_ctime < jobs[prefix]["timestamp"]:
jobs[prefix]["timestamp"] = statResult.st_ctime
jobs[prefix]["bytes"] += entry.stat().st_size
if jobs[prefix]["timestamp"] is None or entry.stat().st_ctime < jobs[prefix]["timestamp"]:
jobs[prefix]["timestamp"] = entry.stat().st_ctime
with _job_lock:
global current_render_job
@ -130,13 +142,13 @@ def delete_unrendered_timelapse(name):
basedir = settings().getBaseFolder("timelapse_tmp")
with _cleanup_lock:
for filename in os.listdir(basedir):
for entry in scandir(basedir):
try:
if fnmatch.fnmatch(filename, "{}*.jpg".format(name)):
os.remove(os.path.join(basedir, filename))
if fnmatch.fnmatch(entry.name, "{}*.jpg".format(name)):
os.remove(entry.path)
except:
if logging.getLogger(__name__).isEnabledFor(logging.DEBUG):
logging.getLogger(__name__).exception("Error while processing file {} during cleanup".format(filename))
logging.getLogger(__name__).exception("Error while processing file {} during cleanup".format(entry.name))
def render_unrendered_timelapse(name, gcode=None, postfix=None, fps=25):
@ -167,26 +179,24 @@ def delete_old_unrendered_timelapses():
prefixes_to_clean = []
with _cleanup_lock:
for filename in os.listdir(basedir):
for entry in scandir(basedir):
try:
path = os.path.join(basedir, filename)
prefix = _extract_prefix(filename)
prefix = _extract_prefix(entry.name)
if prefix is None:
# might be an old tmp_00000.jpg kinda frame. we can't
# render those easily anymore, so delete that stuff
if _old_capture_format_re.match(filename):
os.remove(path)
if _old_capture_format_re.match(entry.name):
os.remove(entry.path)
continue
if prefix in prefixes_to_clean:
continue
if os.path.getmtime(path) < cutoff:
if os.path.getmtime(entry.path) < cutoff:
prefixes_to_clean.append(prefix)
except:
if logging.getLogger(__name__).isEnabledFor(logging.DEBUG):
logging.getLogger(__name__).exception("Error while processing file {} during cleanup".format(filename))
logging.getLogger(__name__).exception("Error while processing file {} during cleanup".format(entry.name))
for prefix in prefixes_to_clean:
delete_unrendered_timelapse(prefix)

View file

@ -7,6 +7,11 @@ __copyright__ = "Copyright (C) 2015 The OctoPrint Project - Released under terms
import logging
import os
try:
from os import scandir, walk
except ImportError:
from scandir import scandir, walk
from jinja2 import nodes
from jinja2.ext import Extension
from jinja2.loaders import FileSystemLoader, PrefixLoader, ChoiceLoader, \
@ -90,7 +95,7 @@ class SelectedFilesLoader(BaseLoader):
def get_all_template_paths(loader):
def walk_folder(folder):
files = []
walk_dir = os.walk(folder, followlinks=True)
walk_dir = walk(folder, followlinks=True)
for dirpath, dirnames, filenames in walk_dir:
for filename in filenames:
path = os.path.join(dirpath, filename)