diff --git a/ckan/config/routing.py b/ckan/config/routing.py
index baa95de71cb..9cfab2d6393 100644
--- a/ckan/config/routing.py
+++ b/ckan/config/routing.py
@@ -404,29 +404,7 @@ def make_map():
action='trash', ckan_icon='trash')
map.connect('ckanadmin', '/ckan-admin/{action}', controller='admin')
- # Storage routes
- with SubMapper(map, controller='ckan.controllers.storage:StorageAPIController') as m:
- m.connect('storage_api', '/api/storage', action='index')
- m.connect('storage_api_set_metadata', '/api/storage/metadata/{label:.*}',
- action='set_metadata', conditions=PUT_POST)
- m.connect('storage_api_get_metadata', '/api/storage/metadata/{label:.*}',
- action='get_metadata', conditions=GET)
- m.connect('storage_api_auth_request',
- '/api/storage/auth/request/{label:.*}',
- action='auth_request')
- m.connect('storage_api_auth_form',
- '/api/storage/auth/form/{label:.*}',
- action='auth_form')
-
with SubMapper(map, controller='ckan.controllers.storage:StorageController') as m:
- m.connect('storage_upload', '/storage/upload',
- action='upload')
- m.connect('storage_upload_handle', '/storage/upload_handle',
- action='upload_handle')
- m.connect('storage_upload_success', '/storage/upload/success',
- action='success')
- m.connect('storage_upload_success_empty', '/storage/upload/success_empty',
- action='success_empty')
m.connect('storage_file', '/storage/f/{label:.*}',
action='file')
diff --git a/ckan/controllers/storage.py b/ckan/controllers/storage.py
index 81bea3ac2c1..fab34671725 100644
--- a/ckan/controllers/storage.py
+++ b/ckan/controllers/storage.py
@@ -1,30 +1,19 @@
+'''
+
+Note: This is the old file store controller for CKAN < 2.2.
+If you are looking for how the file uploads work, you should check
+`lib/uploader.py` and the `resource_download` method of the package
+controller.
+
+'''
import os
import re
-import urllib
-import uuid
-from datetime import datetime
-from cgi import FieldStorage
from ofs import get_impl
-from pylons import request, response
-from pylons.controllers.util import abort, redirect_to
-from pylons import config
from paste.fileapp import FileApp
-from paste.deploy.converters import asbool
-from ckan.lib.base import BaseController, c, request, render, config, h, abort
-from ckan.lib.jsonp import jsonpify
-import ckan.model as model
-import ckan.logic as logic
+from ckan.lib.base import BaseController, request, config, h, abort
-try:
- from cStringIO import StringIO
-except ImportError:
- from StringIO import StringIO
-try:
- import json
-except:
- import simplejson as json
from logging import getLogger
log = getLogger(__name__)
@@ -36,19 +25,6 @@
_eq_re = re.compile(r"^(.*)(=[0-9]*)$")
-def fix_stupid_pylons_encoding(data):
- """
- Fix an apparent encoding problem when calling request.body
- TODO: Investigate whether this is fixed in later versions?
- """
- if data.startswith("%") or data.startswith("+"):
- data = urllib.unquote_plus(data)
- m = _eq_re.match(data)
- if m:
- data = m.groups()[0]
- return data
-
-
def create_pairtree_marker(folder):
""" Creates the pairtree marker for tests if it doesn't exist """
if not folder[:-1] == '/':
@@ -83,27 +59,6 @@ def get_ofs():
return ofs
-def authorize(method, bucket, key, user, ofs):
- """
- Check authz for the user with a given bucket/key combo within a
- particular ofs implementation.
- """
- if not method in ['POST', 'GET', 'PUT', 'DELETE']:
- abort(400)
- if method != 'GET':
- # do not allow overwriting
- if ofs.exists(bucket, key):
- abort(409)
- # now check user stuff
- context = {'user': c.user,
- 'model': model}
- try:
- logic.check_access('file_upload', context, {})
- except logic.NotAuthorized:
- h.flash_error('Not authorized to upload files.')
- abort(401)
-
-
class StorageController(BaseController):
'''Upload to storage backend.
'''
@@ -115,55 +70,6 @@ def ofs(self):
StorageController._ofs_impl = get_ofs()
return StorageController._ofs_impl
- def upload(self):
- label = key_prefix + request.params.get('filepath', str(uuid.uuid4()))
- c.data = {
- 'action': h.url_for('storage_upload_handle', qualified=False),
- 'fields': [
- {
- 'name': 'key',
- 'value': label
- }
- ]
- }
- return render('storage/index.html')
-
- def upload_handle(self):
- bucket_id = BUCKET
- params = dict(request.params.items())
- stream = params.get('file')
- label = params.get('key')
- authorize('POST', BUCKET, label, c.userobj, self.ofs)
- if not label:
- abort(400, "No label")
- if not isinstance(stream, FieldStorage):
- abort(400, "No file stream.")
- del params['file']
- params['filename-original'] = stream.filename
- #params['_owner'] = c.userobj.name if c.userobj else ""
- params['uploaded-by'] = c.userobj.name if c.userobj else ""
-
- self.ofs.put_stream(bucket_id, label, stream.file, params)
- success_action_redirect = h.url_for(
- 'storage_upload_success', qualified=True,
- bucket=BUCKET, label=label)
- # Do not redirect here as it breaks js file uploads (get infinite loop
- # in FF and crash in Chrome)
- return self.success(label)
-
- def success(self, label=None):
- label = request.params.get('label', label)
- h.flash_success('Upload successful')
- c.file_url = h.url_for('storage_file',
- label=label,
- qualified=True)
- c.upload_url = h.url_for('storage_upload')
- return render('storage/success.html')
-
- def success_empty(self, label=None):
- # very simple method that just returns 200 OK
- return ''
-
def file(self, label):
exists = self.ofs.exists(BUCKET, label)
if not exists:
@@ -188,225 +94,3 @@ def file(self, label):
return fapp(request.environ, self.start_response)
else:
h.redirect_to(file_url.encode('ascii', 'ignore'))
-
-
-class StorageAPIController(BaseController):
- _ofs_impl = None
-
- @property
- def ofs(self):
- if not StorageAPIController._ofs_impl:
- StorageAPIController._ofs_impl = get_ofs()
- return StorageAPIController._ofs_impl
-
- @jsonpify
- def index(self):
- info = {
- 'metadata/{label}': {
- 'description': 'Get or set metadata for this '
- 'item in storage', },
- 'auth/request/{label}': {
- 'description': self.auth_request.__doc__, },
- 'auth/form/{label}': {
- 'description': self.auth_form.__doc__, }}
- return info
-
- def set_metadata(self, label):
- bucket = BUCKET
- if not label.startswith("/"):
- label = "/" + label
-
- try:
- data = fix_stupid_pylons_encoding(request.body)
- if data:
- metadata = json.loads(data)
- else:
- metadata = {}
- except:
- abort(400)
-
- try:
- b = self.ofs._require_bucket(bucket)
- except:
- abort(409)
-
- k = self.ofs._get_key(b, label)
- if k is None:
- k = b.new_key(label)
- metadata = metadata.copy()
- metadata["_creation_time"] = str(datetime.utcnow())
- self.ofs._update_key_metadata(k, metadata)
- k.set_contents_from_file(StringIO(''))
- elif request.method == "PUT":
- old = self.ofs.get_metadata(bucket, label)
- to_delete = []
- for ok in old.keys():
- if ok not in metadata:
- to_delete.append(ok)
- if to_delete:
- self.ofs.del_metadata_keys(bucket, label, to_delete)
- self.ofs.update_metadata(bucket, label, metadata)
- else:
- self.ofs.update_metadata(bucket, label, metadata)
-
- k.make_public()
- k.close()
-
- return self.get_metadata(bucket, label)
-
- @jsonpify
- def get_metadata(self, label):
- bucket = BUCKET
- storage_backend = config['ofs.impl']
- if storage_backend in ['google', 's3']:
- if not label.startswith("/"):
- label = "/" + label
- url = "https://%s%s" % (
- self.ofs.conn.calling_format.build_host(
- self.ofs.conn.server_name(), bucket), label)
- else:
- url = h.url_for('storage_file',
- label=label,
- qualified=False
- )
- if url.startswith('/'):
- url = config.get('ckan.site_url', '').rstrip('/') + url
-
- if not self.ofs.exists(bucket, label):
- abort(404)
- metadata = self.ofs.get_metadata(bucket, label)
- metadata["_location"] = url
- return metadata
-
- @jsonpify
- def auth_request(self, label):
- '''Provide authentication information for a request so a client can
- interact with backend storage directly.
-
- :param label: label.
- :param kwargs: sent either via query string for GET or json-encoded
- dict for POST). Interpreted as http headers for request plus an
- (optional) method parameter (being the HTTP method).
-
- Examples of headers are:
-
- Content-Type
- Content-Encoding (optional)
- Content-Length
- Content-MD5
- Expect (should be '100-Continue')
-
- :return: is a json hash containing various attributes including a
- headers dictionary containing an Authorization field which is good for
- 15m.
-
- '''
- bucket = BUCKET
- if request.POST:
- try:
- data = fix_stupid_pylons_encoding(request.body)
- headers = json.loads(data)
- except Exception:
- from traceback import print_exc
- msg = StringIO()
- print_exc(msg)
- log.error(msg.seek(0).read())
- abort(400)
- else:
- headers = dict(request.params)
- if 'method' in headers:
- method = headers['method']
- del headers['method']
- else:
- method = 'POST'
-
- authorize(method, bucket, label, c.userobj, self.ofs)
-
- http_request = self.ofs.authenticate_request(method, bucket, label,
- headers)
- return {
- 'host': http_request.host,
- 'method': http_request.method,
- 'path': http_request.path,
- 'headers': http_request.headers}
-
- def _get_remote_form_data(self, label):
- method = 'POST'
- content_length_range = \
- int(config.get('ckan.storage.max_content_length', 50000000))
- acl = 'public-read'
- fields = [{
- 'name': self.ofs.conn.provider.metadata_prefix + 'uploaded-by',
- 'value': c.userobj.id}]
- conditions = ['{"%s": "%s"}' % (x['name'], x['value']) for x in
- fields]
- # In FF redirect to this breaks js upload as FF attempts to open file
- # (presumably because mimetype = javascript) and this stops js
- # success_action_redirect = h.url_for('storage_api_get_metadata',
- # qualified=True, label=label)
- success_action_redirect = h.url_for('storage_upload_success_empty',
- qualified=True,
- label=label)
- data = self.ofs.conn.build_post_form_args(
- BUCKET,
- label,
- expires_in=72000,
- max_content_length=content_length_range,
- success_action_redirect=success_action_redirect,
- acl=acl,
- fields=fields,
- conditions=conditions
- )
- # HACK: fix up some broken stuff from boto
- # e.g. should not have content-length-range in list of fields!
- storage_backend = config['ofs.impl']
- for idx, field in enumerate(data['fields']):
- if storage_backend == 'google':
- if field['name'] == 'AWSAccessKeyId':
- field['name'] = 'GoogleAccessId'
- if field['name'] == 'content-length-range':
- del data['fields'][idx]
- return data
-
- def _get_form_data(self, label):
- storage_backend = config['ofs.impl']
- if storage_backend in ['google', 's3']:
- return self._get_remote_form_data(label)
- else:
- data = {
- 'action': h.url_for('storage_upload_handle', qualified=False),
- 'fields': [
- {
- 'name': 'key',
- 'value': label
- }
- ]
- }
- return data
-
- @jsonpify
- def auth_form(self, label):
- '''Provide fields for a form upload to storage including
- authentication.
-
- :param label: label.
- :return: json-encoded dictionary with action parameter and fields list.
- '''
- bucket = BUCKET
- if request.POST:
- try:
- data = fix_stupid_pylons_encoding(request.body)
- headers = json.loads(data)
- except Exception:
- from traceback import print_exc
- msg = StringIO()
- print_exc(msg)
- log.error(msg.seek(0).read())
- abort(400)
- else:
- headers = dict(request.params)
-
- method = 'POST'
- authorize(method, bucket, label, c.userobj, self.ofs)
- data = self._get_form_data(label)
- return data
diff --git a/ckan/lib/helpers.py b/ckan/lib/helpers.py
index 8f79f2e851f..3c4477e7163 100644
--- a/ckan/lib/helpers.py
+++ b/ckan/lib/helpers.py
@@ -2103,7 +2103,7 @@ def get_organization(org=None, include_datasets=False):
try:
return logic.get_action('organization_show')(
{}, {'id': org, 'include_datasets': include_datasets})
- except (NotFound, ValidationError, NotAuthorized):
+ except (logic.NotFound, logic.ValidationError, logic.NotAuthorized):
return {}
diff --git a/ckan/lib/lazyjson.py b/ckan/lib/lazyjson.py
index 6305cb7d894..c4c29160b49 100644
--- a/ckan/lib/lazyjson.py
+++ b/ckan/lib/lazyjson.py
@@ -28,7 +28,7 @@ def method(self, *args, **kwargs):
return getattr(self._loads(), name)(*args, **kwargs)
return method
-for fn in ['__cmp__', '__contains__', '__delitem__', '__eq__', '__ge__',
+for fn in ['__contains__', '__delitem__', '__eq__', '__ge__',
'__getitem__', '__gt__', '__iter__', '__le__', '__len__', '__lt__',
'__ne__', '__setitem__', 'clear', 'copy', 'fromkeys', 'get',
'has_key', 'items', 'iteritems', 'iterkeys', 'itervalues', 'keys',
@@ -47,9 +47,13 @@ class JSONString(int):
subclassing JSONEncoder and modifying its internal workings, or
monkeypatching the simplejson library.
'''
- def __init__(self, s):
- self.s = s
- super(JSONString, self).__init__(-1)
+ def __new__(cls, s):
+ obj = super(JSONString, cls).__new__(cls, -1)
+ obj.s = s
+ return obj
def __str__(self):
- return s
+ return self.s
+
+ def __repr__(self):
+ return "JSONString(%r)" % self.s
diff --git a/ckan/lib/uploader.py b/ckan/lib/uploader.py
index 775f8adac1b..255322b779a 100644
--- a/ckan/lib/uploader.py
+++ b/ckan/lib/uploader.py
@@ -2,10 +2,11 @@
import cgi
import pylons
import datetime
-import ckan.lib.munge as munge
import logging
-import ckan.logic as logic
+import ckan.lib.munge as munge
+import ckan.logic as logic
+import ckan.plugins as plugins
config = pylons.config
log = logging.getLogger(__name__)
@@ -15,11 +16,38 @@
_max_image_size = None
+def get_uploader(upload_to, old_filename=None):
+ '''Query IUploader plugins and return an uploader instance for general
+ files.'''
+ upload = None
+ for plugin in plugins.PluginImplementations(plugins.IUploader):
+ upload = plugin.get_uploader(upload_to, old_filename)
+
+ # default uploader
+ if upload is None:
+ upload = Upload(upload_to, old_filename)
+
+ return upload
+
+
+def get_resource_uploader(data_dict):
+ '''Query IUploader plugins and return a resource uploader instance.'''
+ upload = None
+ for plugin in plugins.PluginImplementations(plugins.IUploader):
+ upload = plugin.get_resource_uploader(data_dict)
+
+ # default uploader
+ if upload is None:
+ upload = ResourceUpload(data_dict)
+
+ return upload
+
+
def get_storage_path():
'''Function to cache storage path'''
global _storage_path
- #None means it has not been set. False means not in config.
+ # None means it has not been set. False means not in config.
if _storage_path is None:
storage_path = config.get('ckan.storage_path')
ofs_impl = config.get('ofs.impl')
@@ -75,7 +103,7 @@ def __init__(self, object_type, old_filename=None):
try:
os.makedirs(self.storage_path)
except OSError, e:
- ## errno 17 is file already exists
+ # errno 17 is file already exists
if e.errno != 17:
raise
self.object_type = object_type
@@ -107,7 +135,7 @@ def update_data_dict(self, data_dict, url_field, file_field, clear_field):
data_dict[url_field] = self.filename
self.upload_file = self.upload_field_storage.file
self.tmp_filepath = self.filepath + '~'
- ### keep the file if there has been no change
+ # keep the file if there has been no change
elif self.old_filename and not self.old_filename.startswith('http'):
if not self.clear:
data_dict[url_field] = self.old_filename
@@ -145,7 +173,7 @@ def upload(self, max_size=2):
and not self.old_filename.startswith('http')):
try:
os.remove(self.old_filepath)
- except OSError, e:
+ except OSError:
pass
@@ -159,7 +187,7 @@ def __init__(self, resource):
try:
os.makedirs(self.storage_path)
except OSError, e:
- ## errno 17 is file already exists
+ # errno 17 is file already exists
if e.errno != 17:
raise
self.filename = None
@@ -214,7 +242,7 @@ def upload(self, id, max_size=10):
try:
os.makedirs(directory)
except OSError, e:
- ## errno 17 is file already exists
+ # errno 17 is file already exists
if e.errno != 17:
raise
tmp_filepath = filepath + '~'
@@ -223,7 +251,7 @@ def upload(self, id, max_size=10):
current_size = 0
while True:
current_size = current_size + 1
- #MB chunks
+ # MB chunks
data = self.upload_file.read(2 ** 20)
if not data:
break
diff --git a/ckan/logic/action/create.py b/ckan/logic/action/create.py
index 27f9385d411..acaa09e2dee 100644
--- a/ckan/logic/action/create.py
+++ b/ckan/logic/action/create.py
@@ -294,7 +294,7 @@ def resource_create(context, data_dict):
if not 'resources' in pkg_dict:
pkg_dict['resources'] = []
- upload = uploader.ResourceUpload(data_dict)
+ upload = uploader.get_resource_uploader(data_dict)
pkg_dict['resources'].append(data_dict)
@@ -683,7 +683,7 @@ def _group_or_org_create(context, data_dict, is_org=False):
session = context['session']
data_dict['is_organization'] = is_org
- upload = uploader.Upload('group')
+ upload = uploader.get_uploader('group')
upload.update_data_dict(data_dict, 'image_url',
'image_upload', 'clear_upload')
# get the schema
@@ -760,6 +760,7 @@ def _group_or_org_create(context, data_dict, is_org=False):
logic.get_action('activity_create')(activity_create_context, activity_dict)
upload.upload(uploader.get_max_image_size())
+
if not context.get('defer_commit'):
model.repo.commit()
context["group"] = group
diff --git a/ckan/logic/action/get.py b/ckan/logic/action/get.py
index 4587578d370..377a83dbac9 100644
--- a/ckan/logic/action/get.py
+++ b/ckan/logic/action/get.py
@@ -1080,9 +1080,9 @@ def package_show(context, data_dict):
for item in plugins.PluginImplementations(plugins.IPackageController):
item.read(pkg)
- for resource_dict in package_dict['resources']:
- for item in plugins.PluginImplementations(plugins.IResourceController):
- resource_dict = item.before_show(resource_dict)
+ for item in plugins.PluginImplementations(plugins.IResourceController):
+ for resource_dict in package_dict['resources']:
+ item.before_show(resource_dict)
if not package_dict_validated:
package_plugin = lib_plugins.lookup_package_plugin(
diff --git a/ckan/logic/action/update.py b/ckan/logic/action/update.py
index 2eca8de8191..6e6cc3e46bb 100644
--- a/ckan/logic/action/update.py
+++ b/ckan/logic/action/update.py
@@ -149,7 +149,7 @@ def resource_update(context, data_dict):
for plugin in plugins.PluginImplementations(plugins.IResourceController):
plugin.before_update(context, pkg_dict['resources'][n], data_dict)
- upload = uploader.ResourceUpload(data_dict)
+ upload = uploader.get_resource_uploader(data_dict)
pkg_dict['resources'][n] = data_dict
@@ -500,6 +500,7 @@ def package_relationship_update(context, data_dict):
context['relationship'] = entity
return _update_package_relationship(entity, comment, context)
+
def _group_or_org_update(context, data_dict, is_org=False):
model = context['model']
user = context['user']
@@ -516,15 +517,15 @@ def _group_or_org_update(context, data_dict, is_org=False):
# get the schema
group_plugin = lib_plugins.lookup_group_plugin(group.type)
try:
- schema = group_plugin.form_to_db_schema_options({'type':'update',
- 'api':'api_version' in context,
+ schema = group_plugin.form_to_db_schema_options({'type': 'update',
+ 'api': 'api_version' in context,
'context': context})
except AttributeError:
schema = group_plugin.form_to_db_schema()
- upload = uploader.Upload('group', group.image_url)
+ upload = uploader.get_uploader('group', group.image_url)
upload.update_data_dict(data_dict, 'image_url',
- 'image_upload', 'clear_upload')
+ 'image_upload', 'clear_upload')
if is_org:
_check_access('organization_update', context, data_dict)
@@ -610,12 +611,13 @@ def _group_or_org_update(context, data_dict, is_org=False):
# in the group.
upload.upload(uploader.get_max_image_size())
+
if not context.get('defer_commit'):
model.repo.commit()
-
return model_dictize.group_dictize(group, context)
+
def group_update(context, data_dict):
'''Update a group.
diff --git a/ckan/plugins/interfaces.py b/ckan/plugins/interfaces.py
index 2763b8bf922..e51b6ce4ab8 100644
--- a/ckan/plugins/interfaces.py
+++ b/ckan/plugins/interfaces.py
@@ -23,6 +23,7 @@
'IFacets',
'IAuthenticator',
'ITranslation',
+ 'IUploader'
]
from inspect import isclass
@@ -1440,11 +1441,10 @@ class IAuthenticator(Interface):
Allows custom authentication methods to be integrated into CKAN.
Currently it is experimental and the interface may change.'''
-
def identify(self):
'''called to identify the user.
- If the user is identfied then it should set
+ If the user is identified then it should set
c.user: The id of the user
c.userobj: The actual user object (this may be removed as a
requirement in a later release so that access to the model is not
@@ -1472,3 +1472,85 @@ def i18n_locales(self):
def i18n_domain(self):
'''Change the gettext domain handled by this plugin'''
+
+
+class IUploader(Interface):
+ '''
+ Extensions implementing this interface can provide custom uploaders to
+ upload resources and group images.
+ '''
+
+ def get_uploader(self):
+ '''Return an uploader object to upload general files that must
+ implement the following methods:
+
+ ``__init__(upload_to, old_filename=None)``
+
+ Set up the uploader.
+
+ :param upload_to: name of the subdirectory within the storage
+ directory to upload the file
+ :type upload_to: string
+
+ :param old_filename: name of an existing image asset, so the extension
+ can replace it if necessary
+ :type old_filename: string
+
+ ``update_data_dict(data_dict, url_field, file_field, clear_field)``
+
+ Allow the data_dict to be manipulated before it reaches any
+ validators.
+
+ :param data_dict: data_dict to be updated
+ :type data_dict: dictionary
+
+ :param url_field: name of the field where the upload is going to be
+ :type url_field: string
+
+ :param file_field: name of the key where the FieldStorage is kept (i.e
+ the field where the file data actually is).
+ :type file_field: string
+
+ :param clear_field: name of a boolean field which requests the upload
+ to be deleted.
+ :type clear_field: string
+
+ ``upload(max_size)``
+
+ Perform the actual upload.
+
+ :param max_size: upload size can be limited by this value in MBs.
+ :type max_size: int
+
+ '''
+
+ def get_resource_uploader(self):
+ '''Return an uploader object used to upload resource files that must
+ implement the following methods:
+
+ ``__init__(resource)``
+
+ Set up the resource uploader.
+
+ :param resource: resource dict
+ :type resource: dictionary
+
+ ``upload(id, max_size)``
+
+ Perform the actual upload.
+
+ :param id: resource id, can be used to create filepath
+ :type id: string
+
+ :param max_size: upload size can be limited by this value in MBs.
+ :type max_size: int
+
+ ``get_path(id)``
+
+ Required by the ``resource_download`` action to determine the path to
+ the file.
+
+ :param id: resource id
+ :type id: string
+
+ '''
diff --git a/ckan/templates/snippets/organization_item.html b/ckan/templates/snippets/organization_item.html
index 2b066464b77..81cea8bb6fb 100644
--- a/ckan/templates/snippets/organization_item.html
+++ b/ckan/templates/snippets/organization_item.html
@@ -1,20 +1,32 @@
-      {{ h.markdown_extract(organization.description)|urlize }}
-      {{ h.markdown_extract(organization.description, truncate)|urlize }}
-
-
{{ organization.title or organization.name }}
- {% if organization.description %}
- {% if truncate == 0 %}
-
+
+ {% endblock %}
+ {% block organization_item_header_title %}
+
{{ organization.title or organization.name }}
+ {% endblock %}
+ {% block organization_item_header_description %}
+ {% if organization.description %}
+ {% if truncate == 0 %}
+
Choose a dataset attribute and find out which categories in that area have the most datasets. E.g. tags, groups, license, res_format, country.
- - -Dataset | Average rating | Number of ratings |
---|---|---|
${h.link_to(package.title or package.name, h.url_for(controller='package', action='read', id=package.name))} | ${rating} | ${num_ratings} | -
No ratings
- -Dataset | Number of edits |
---|---|
${h.link_to(package.title or package.name, h.url_for(controller='package', action='read', id=package.name))} | ${edits} | -
Group | Number of datasets |
---|---|
${h.link_to(group.title or group.name, h.url_for(controller='group', action='read', id=group.name))} | ${num_packages} | -
${h.link_to(tag.name, h.url_for(controller='tag', action='read', id=tag.name))} | ${num_packages} | -
${h.linked_user(user)} | ${num_packages} | -
- Page last updated: - - ${datetime.datetime.now().strftime('%c')} -
-Choose a dataset attribute and find out which categories in that area have the most datasets. E.g. tags, groups, license, res_format, country.
- - -